diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index b11e3cd531..5b3f4336e6 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,7 +1,7 @@ ## References _Add references/links to any related issues or PRs. These may include:_ -* Fixes #[issue-number] -* Related to [REST Contract](https://github.com/DSpace/Rest7Contract) +* Fixes #`issue-number` (if this fixes an issue ticket) +* Related to DSpace/RestContract#`pr-number` (if a corresponding REST Contract PR exists) ## Description Short summary of changes (1-2 sentences). @@ -22,5 +22,7 @@ _This checklist provides a reminder of what we are going to look for when review - [ ] My PR passes Checkstyle validation based on the [Code Style Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Style+Guide). - [ ] My PR includes Javadoc for _all new (or modified) public methods and classes_. It also includes Javadoc for large or complex private methods. - [ ] My PR passes all tests and includes new/updated Unit or Integration Tests based on the [Code Testing Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Testing+Guide). -- [ ] If my PR includes new, third-party dependencies (in any `pom.xml`), I've made sure their licenses align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation. -- [ ] If my PR modifies the REST API, I've linked to the REST Contract page (or open PR) related to this change. 
+- [ ] If my PR includes new libraries/dependencies (in any `pom.xml`), I've made sure their licenses align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation. +- [ ] If my PR modifies REST API endpoints, I've opened a separate [REST Contract](https://github.com/DSpace/RestContract/blob/main/README.md) PR related to this change. +- [ ] If my PR includes new configurations, I've provided basic technical documentation in the PR itself. +- [ ] If my PR fixes an issue ticket, I've [linked them together](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue). diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 4060dbd672..52714a8ba2 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -6,14 +6,15 @@ name: Build # Run this Build for all pushes / PRs to current branch on: [push, pull_request] +permissions: + contents: read # to fetch code (actions/checkout) + jobs: tests: runs-on: ubuntu-latest env: # Give Maven 1GB of memory to work with - # Suppress all Maven "downloading" messages in logs (see https://stackoverflow.com/a/35653426) - # This also slightly speeds builds, as there is less logging - MAVEN_OPTS: "-Xmx1024M -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn" + MAVEN_OPTS: "-Xmx1024M" strategy: # Create a matrix of two separate configurations for Unit vs Integration Tests # This will ensure those tasks are run in parallel @@ -44,18 +45,18 @@ jobs: steps: # https://github.com/actions/checkout - name: Checkout codebase - uses: actions/checkout@v2 + uses: actions/checkout@v3 # https://github.com/actions/setup-java - name: Install JDK ${{ matrix.java }} - uses: actions/setup-java@v2 + uses: actions/setup-java@v3 with: 
java-version: ${{ matrix.java }} distribution: 'temurin' # https://github.com/actions/cache - name: Cache Maven dependencies - uses: actions/cache@v2 + uses: actions/cache@v3 with: # Cache entire ~/.m2/repository path: ~/.m2/repository @@ -67,17 +68,17 @@ jobs: - name: Run Maven ${{ matrix.type }} env: TEST_FLAGS: ${{ matrix.mvnflags }} - run: mvn install -B -V -P-assembly -Pcoverage-report $TEST_FLAGS + run: mvn --no-transfer-progress -V install -P-assembly -Pcoverage-report $TEST_FLAGS # If previous step failed, save results of tests to downloadable artifact for this job # (This artifact is downloadable at the bottom of any job's summary page) - name: Upload Results of ${{ matrix.type }} to Artifact if: ${{ failure() }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: ${{ matrix.type }} results path: ${{ matrix.resultsdir }} # https://github.com/codecov/codecov-action - name: Upload coverage to Codecov.io - uses: codecov/codecov-action@v2 + uses: codecov/codecov-action@v3 diff --git a/.github/workflows/codescan.yml b/.github/workflows/codescan.yml new file mode 100644 index 0000000000..7580b4ba3d --- /dev/null +++ b/.github/workflows/codescan.yml @@ -0,0 +1,59 @@ +# DSpace CodeQL code scanning configuration for GitHub +# https://docs.github.com/en/code-security/code-scanning +# +# NOTE: Code scanning must be run separate from our default build.yml +# because CodeQL requires a fresh build with all tests *disabled*. +name: "Code Scanning" + +# Run this code scan for all pushes / PRs to main branch. Also run once a week. +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + # Don't run if PR is only updating static documentation + paths-ignore: + - '**/*.md' + - '**/*.txt' + schedule: + - cron: "37 0 * * 1" + +jobs: + analyze: + name: Analyze Code + runs-on: ubuntu-latest + # Limit permissions of this GitHub action. 
Can only write to security-events + permissions: + actions: read + contents: read + security-events: write + + steps: + # https://github.com/actions/checkout + - name: Checkout repository + uses: actions/checkout@v3 + + # https://github.com/actions/setup-java + - name: Install JDK + uses: actions/setup-java@v3 + with: + java-version: 11 + distribution: 'temurin' + + # Initializes the CodeQL tools for scanning. + # https://github.com/github/codeql-action + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + # Codescan Javascript as well since a few JS files exist in REST API's interface + languages: java, javascript + + # Autobuild attempts to build any compiled languages + # NOTE: Based on testing, this autobuild process works well for DSpace. A custom + # DSpace build w/caching (like in build.yml) was about the same speed as autobuild. + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + # Perform GitHub Code Scanning. + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 34539abc16..64e12f01aa 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -12,6 +12,9 @@ on: - 'dspace-**' pull_request: +permissions: + contents: read # to fetch code (actions/checkout) + jobs: docker: # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace' @@ -31,21 +34,30 @@ jobs: # We turn off 'latest' tag by default. TAGS_FLAVOR: | latest=false + # Architectures / Platforms for which we will build Docker images + # If this is a PR, we ONLY build for AMD64. For PRs we only do a sanity check test to ensure Docker builds work. + # If this is NOT a PR (e.g. a tag or merge commit), also build for ARM64. NOTE: The ARM64 build takes MUCH + # longer (around 45mins or so) which is why we only run it when pushing a new Docker image. 
+ PLATFORMS: linux/amd64${{ github.event_name != 'pull_request' && ', linux/arm64' || '' }} steps: # https://github.com/actions/checkout - name: Checkout codebase - uses: actions/checkout@v2 + uses: actions/checkout@v3 # https://github.com/docker/setup-buildx-action - name: Setup Docker Buildx - uses: docker/setup-buildx-action@v1 + uses: docker/setup-buildx-action@v2 + + # https://github.com/docker/setup-qemu-action + - name: Set up QEMU emulation to build for multiple architectures + uses: docker/setup-qemu-action@v2 # https://github.com/docker/login-action - name: Login to DockerHub # Only login if not a PR, as PRs only trigger a Docker build and not a push if: github.event_name != 'pull_request' - uses: docker/login-action@v1 + uses: docker/login-action@v2 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_ACCESS_TOKEN }} @@ -57,7 +69,7 @@ jobs: # Get Metadata for docker_build_deps step below - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-dependencies' image id: meta_build_deps - uses: docker/metadata-action@v3 + uses: docker/metadata-action@v4 with: images: dspace/dspace-dependencies tags: ${{ env.IMAGE_TAGS }} @@ -66,10 +78,11 @@ jobs: # https://github.com/docker/build-push-action - name: Build and push 'dspace-dependencies' image id: docker_build_deps - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v3 with: context: . 
file: ./Dockerfile.dependencies + platforms: ${{ env.PLATFORMS }} # For pull requests, we run the Docker build (to ensure no PR changes break the build), # but we ONLY do an image push to DockerHub if it's NOT a PR push: ${{ github.event_name != 'pull_request' }} @@ -83,7 +96,7 @@ jobs: # Get Metadata for docker_build step below - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace' image id: meta_build - uses: docker/metadata-action@v3 + uses: docker/metadata-action@v4 with: images: dspace/dspace tags: ${{ env.IMAGE_TAGS }} @@ -91,10 +104,11 @@ jobs: - name: Build and push 'dspace' image id: docker_build - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v3 with: context: . file: ./Dockerfile + platforms: ${{ env.PLATFORMS }} # For pull requests, we run the Docker build (to ensure no PR changes break the build), # but we ONLY do an image push to DockerHub if it's NOT a PR push: ${{ github.event_name != 'pull_request' }} @@ -108,7 +122,7 @@ jobs: # Get Metadata for docker_build_test step below - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-test' image id: meta_build_test - uses: docker/metadata-action@v3 + uses: docker/metadata-action@v4 with: images: dspace/dspace tags: ${{ env.IMAGE_TAGS }} @@ -119,10 +133,11 @@ jobs: - name: Build and push 'dspace-test' image id: docker_build_test - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v3 with: context: . 
file: ./Dockerfile.test + platforms: ${{ env.PLATFORMS }} # For pull requests, we run the Docker build (to ensure no PR changes break the build), # but we ONLY do an image push to DockerHub if it's NOT a PR push: ${{ github.event_name != 'pull_request' }} @@ -136,7 +151,7 @@ jobs: # Get Metadata for docker_build_test step below - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-cli' image id: meta_build_cli - uses: docker/metadata-action@v3 + uses: docker/metadata-action@v4 with: images: dspace/dspace-cli tags: ${{ env.IMAGE_TAGS }} @@ -144,13 +159,14 @@ jobs: - name: Build and push 'dspace-cli' image id: docker_build_cli - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v3 with: context: . file: ./Dockerfile.cli + platforms: ${{ env.PLATFORMS }} # For pull requests, we run the Docker build (to ensure no PR changes break the build), # but we ONLY do an image push to DockerHub if it's NOT a PR push: ${{ github.event_name != 'pull_request' }} # Use tags / labels provided by 'docker/metadata-action' above tags: ${{ steps.meta_build_cli.outputs.tags }} - labels: ${{ steps.meta_build_cli.outputs.labels }} \ No newline at end of file + labels: ${{ steps.meta_build_cli.outputs.labels }} diff --git a/.github/workflows/issue_opened.yml b/.github/workflows/issue_opened.yml index 3ccdd22a0d..5d7c1c30f7 100644 --- a/.github/workflows/issue_opened.yml +++ b/.github/workflows/issue_opened.yml @@ -5,25 +5,22 @@ on: issues: types: [opened] +permissions: {} jobs: automation: runs-on: ubuntu-latest steps: # Add the new issue to a project board, if it needs triage - # See https://github.com/marketplace/actions/create-project-card-action - - name: Add issue to project board + # See https://github.com/actions/add-to-project + - name: Add issue to triage board # Only add to project board if issue is flagged as "needs triage" or has no labels # NOTE: By default we flag new issues as "needs triage" in our issue template if: 
(contains(github.event.issue.labels.*.name, 'needs triage') || join(github.event.issue.labels.*.name) == '') - uses: technote-space/create-project-card-action@v1 + uses: actions/add-to-project@v0.3.0 # Note, the authentication token below is an ORG level Secret. - # It must be created/recreated manually via a personal access token with "public_repo" and "admin:org" permissions + # It must be created/recreated manually via a personal access token with admin:org, project, public_repo permissions # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token#permissions-for-the-github_token # This is necessary because the "DSpace Backlog" project is an org level project (i.e. not repo specific) with: - GITHUB_TOKEN: ${{ secrets.ORG_PROJECT_TOKEN }} - PROJECT: DSpace Backlog - COLUMN: Triage - CHECK_ORG_PROJECT: true - # Ignore errors. - continue-on-error: true + github-token: ${{ secrets.TRIAGE_PROJECT_TOKEN }} + project-url: https://github.com/orgs/DSpace/projects/24 diff --git a/.github/workflows/label_merge_conflicts.yml b/.github/workflows/label_merge_conflicts.yml index dcbab18f1b..d71d244c2b 100644 --- a/.github/workflows/label_merge_conflicts.yml +++ b/.github/workflows/label_merge_conflicts.yml @@ -5,21 +5,32 @@ name: Check for merge conflicts # NOTE: This means merge conflicts are only checked for when a PR is merged to main. on: push: - branches: - - main + branches: [ main ] + # So that the `conflict_label_name` is removed if conflicts are resolved, + # we allow this to run for `pull_request_target` so that github secrets are available. + pull_request_target: + types: [ synchronize ] + +permissions: {} jobs: triage: + # Ensure this job never runs on forked repos. 
It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' runs-on: ubuntu-latest + permissions: + pull-requests: write steps: - # See: https://github.com/mschilde/auto-label-merge-conflicts/ + # See: https://github.com/prince-chrismc/label-merge-conflicts-action - name: Auto-label PRs with merge conflicts - uses: mschilde/auto-label-merge-conflicts@v2.0 + uses: prince-chrismc/label-merge-conflicts-action@v2 # Add "merge conflict" label if a merge conflict is detected. Remove it when resolved. # Note, the authentication token is created automatically # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token with: - CONFLICT_LABEL_NAME: 'merge conflict' - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - # Ignore errors - continue-on-error: true + conflict_label_name: 'merge conflict' + github_token: ${{ secrets.GITHUB_TOKEN }} + conflict_comment: | + Hi @${author}, + Conflicts have been detected against the base branch. + Please [resolve these conflicts](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/about-merge-conflicts) as soon as you can. Thanks! diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000000..45a6af9ce5 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,45 @@ +# How to Contribute + +DSpace is a community built and supported project. We do not have a centralized development or support team, but have a dedicated group of volunteers who help us improve the software, documentation, resources, etc. 
+ +* [Contribute new code via a Pull Request](#contribute-new-code-via-a-pull-request) +* [Contribute documentation](#contribute-documentation) +* [Help others on mailing lists or Slack](#help-others-on-mailing-lists-or-slack) +* [Join a working or interest group](#join-a-working-or-interest-group) + +## Contribute new code via a Pull Request + +We accept [GitHub Pull Requests (PRs)](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork) at any time from anyone. +Contributors to each release are recognized in our [Release Notes](https://wiki.lyrasis.org/display/DSDOC7x/Release+Notes). + +Code Contribution Checklist +- [ ] PRs _should_ be smaller in size (ideally less than 1,000 lines of code, not including comments & tests) +- [ ] PRs **must** pass Checkstyle validation based on our [Code Style Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Style+Guide). +- [ ] PRs **must** include Javadoc for _all new/modified public methods and classes_. Larger private methods should also have Javadoc +- [ ] PRs **must** pass all automated tests and include new/updated Unit or Integration tests based on our [Code Testing Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Testing+Guide). +- [ ] If a PR includes new libraries/dependencies (in any `pom.xml`), then their software licenses **must** align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation. +- [ ] Basic technical documentation _should_ be provided for any new features or changes to the REST API. REST API changes should be documented in our [Rest Contract](https://github.com/DSpace/RestContract). 
+- [ ] If a PR fixes an issue ticket, please [link them together](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue). + +Additional details on the code contribution process can be found in our [Code Contribution Guidelines](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines) + +## Contribute documentation + +DSpace Documentation is a collaborative effort in a shared Wiki. The latest documentation is at https://wiki.lyrasis.org/display/DSDOC7x + +If you find areas of the DSpace Documentation which you wish to improve, please request a Wiki account by emailing wikihelp@lyrasis.org. +Once you have an account setup, contact @tdonohue (via [Slack](https://wiki.lyrasis.org/display/DSPACE/Slack) or email) for access to edit our Documentation. + +## Help others on mailing lists or Slack + +DSpace has our own [Slack](https://wiki.lyrasis.org/display/DSPACE/Slack) community and [Mailing Lists](https://wiki.lyrasis.org/display/DSPACE/Mailing+Lists) where discussions take place and questions are answered. +Anyone is welcome to join and help others. We just ask you to follow our [Code of Conduct](https://www.lyrasis.org/about/Pages/Code-of-Conduct.aspx) (adopted via LYRASIS). + +## Join a working or interest group + +Most of the work in building/improving DSpace comes via [Working Groups](https://wiki.lyrasis.org/display/DSPACE/DSpace+Working+Groups) or [Interest Groups](https://wiki.lyrasis.org/display/DSPACE/DSpace+Interest+Groups). + +All working/interest groups are open to anyone to join and participate. A few key groups to be aware of include: + +* [DSpace 7 Working Group](https://wiki.lyrasis.org/display/DSPACE/DSpace+7+Working+Group) - This is the main (mostly volunteer) development team. We meet weekly to review our current development [project board](https://github.com/orgs/DSpace/projects), assigning tickets and/or PRs. 
+* [DSpace Community Advisory Team (DCAT)](https://wiki.lyrasis.org/display/cmtygp/DSpace+Community+Advisory+Team) - This is an interest group for repository managers/administrators. We meet monthly to discuss DSpace, share tips & provide feedback back to developers. \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 8a21c60a68..444a1bcf0b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -20,7 +20,7 @@ USER dspace ADD --chown=dspace . /app/ # Build DSpace (note: this build doesn't include the optional, deprecated "dspace-rest" webapp) # Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small -RUN mvn package && \ +RUN mvn --no-transfer-progress package && \ mv /app/dspace/target/${TARGET_DIR}/* /install && \ mvn clean diff --git a/Dockerfile.cli b/Dockerfile.cli index e8966f7bb6..76e559fc83 100644 --- a/Dockerfile.cli +++ b/Dockerfile.cli @@ -19,7 +19,7 @@ USER dspace # Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents) ADD --chown=dspace . /app/ # Build DSpace. Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small -RUN mvn package && \ +RUN mvn --no-transfer-progress package && \ mv /app/dspace/target/${TARGET_DIR}/* /install && \ mvn clean diff --git a/Dockerfile.test b/Dockerfile.test index 568ff9b60a..4e9b2b5b43 100644 --- a/Dockerfile.test +++ b/Dockerfile.test @@ -22,7 +22,7 @@ USER dspace ADD --chown=dspace . /app/ # Build DSpace (INCLUDING the optional, deprecated "dspace-rest" webapp) # Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small -RUN mvn package -Pdspace-rest && \ +RUN mvn --no-transfer-progress package -Pdspace-rest && \ mv /app/dspace/target/${TARGET_DIR}/* /install && \ mvn clean @@ -58,9 +58,11 @@ COPY --from=ant_build /dspace $DSPACE_INSTALL # NOTE: secretRequired="false" should only be used when AJP is NOT accessible from an external network. 
But, secretRequired="true" isn't supported by mod_proxy_ajp until Apache 2.5 RUN sed -i '/Service name="Catalina".*/a \\n ' $TOMCAT_INSTALL/conf/server.xml # Expose Tomcat port and AJP port -EXPOSE 8080 8009 +EXPOSE 8080 8009 8000 # Give java extra memory (2GB) ENV JAVA_OPTS=-Xmx2000m +# Set up debugging +ENV CATALINA_OPTS=-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=*:8000 # Link the DSpace 'server' webapp into Tomcat's webapps directory. # This ensures that when we start Tomcat, it runs from /server path (e.g. http://localhost:8080/server/) diff --git a/LICENSES_THIRD_PARTY b/LICENSES_THIRD_PARTY index f918af1c3e..92d0b71a70 100644 --- a/LICENSES_THIRD_PARTY +++ b/LICENSES_THIRD_PARTY @@ -21,28 +21,29 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines Apache Software License, Version 2.0: * Ant-Contrib Tasks (ant-contrib:ant-contrib:1.0b3 - http://ant-contrib.sourceforge.net) - * AWS SDK for Java - Core (com.amazonaws:aws-java-sdk-core:1.12.116 - https://aws.amazon.com/sdkforjava) - * AWS Java SDK for AWS KMS (com.amazonaws:aws-java-sdk-kms:1.12.116 - https://aws.amazon.com/sdkforjava) - * AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.12.116 - https://aws.amazon.com/sdkforjava) - * JMES Path Query library (com.amazonaws:jmespath-java:1.12.116 - https://aws.amazon.com/sdkforjava) - * jcommander (com.beust:jcommander:1.78 - https://jcommander.org) + * AWS SDK for Java - Core (com.amazonaws:aws-java-sdk-core:1.12.261 - https://aws.amazon.com/sdkforjava) + * AWS Java SDK for AWS KMS (com.amazonaws:aws-java-sdk-kms:1.12.261 - https://aws.amazon.com/sdkforjava) + * AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.12.261 - https://aws.amazon.com/sdkforjava) + * JMES Path Query library (com.amazonaws:jmespath-java:1.12.261 - https://aws.amazon.com/sdkforjava) * HPPC Collections (com.carrotsearch:hppc:0.8.1 - http://labs.carrotsearch.com/hppc.html/hppc) - * parso (com.epam:parso:2.0.11 - 
https://github.com/epam/parso) + * com.drewnoakes:metadata-extractor (com.drewnoakes:metadata-extractor:2.16.0 - https://drewnoakes.com/code/exif/) + * parso (com.epam:parso:2.0.14 - https://github.com/epam/parso) + * Esri Geometry API for Java (com.esri.geometry:esri-geometry-api:2.2.0 - https://github.com/Esri/geometry-api-java) * ClassMate (com.fasterxml:classmate:1.3.0 - http://github.com/cowtowncoder/java-classmate) - * Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.12.3 - http://github.com/FasterXML/jackson) - * Jackson-core (com.fasterxml.jackson.core:jackson-core:2.12.3 - https://github.com/FasterXML/jackson-core) - * jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.12.3 - http://github.com/FasterXML/jackson) - * Jackson dataformat: CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.12.3 - http://github.com/FasterXML/jackson-dataformats-binary) - * Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.11.2 - http://github.com/FasterXML/jackson-dataformats-binary) + * Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.12.6 - http://github.com/FasterXML/jackson) + * Jackson-core (com.fasterxml.jackson.core:jackson-core:2.12.6 - https://github.com/FasterXML/jackson-core) + * jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.12.6.1 - http://github.com/FasterXML/jackson) + * Jackson dataformat: CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.12.6 - http://github.com/FasterXML/jackson-dataformats-binary) + * Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.12.3 - http://github.com/FasterXML/jackson-dataformats-binary) * Jackson-dataformat-YAML (com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1 - https://github.com/FasterXML/jackson-dataformats-text) - * Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.10.3 - 
https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8) - * Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.10.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) + * Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8) * Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.11.1 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) - * Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.10.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names) + * Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) + * Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names) * Java UUID Generator (com.fasterxml.uuid:java-uuid-generator:4.0.1 - https://github.com/cowtowncoder/java-uuid-generator) - * Woodstox (com.fasterxml.woodstox:woodstox-core:5.0.3 - https://github.com/FasterXML/woodstox) + * Woodstox (com.fasterxml.woodstox:woodstox-core:6.2.4 - https://github.com/FasterXML/woodstox) * zjsonpatch (com.flipkart.zjsonpatch:zjsonpatch:0.4.6 - https://github.com/flipkart-incubator/zjsonpatch/) - * Caffeine cache (com.github.ben-manes.caffeine:caffeine:2.8.4 - https://github.com/ben-manes/caffeine) + * Caffeine cache (com.github.ben-manes.caffeine:caffeine:2.9.2 - https://github.com/ben-manes/caffeine) * btf (com.github.java-json-tools:btf:1.3 - https://github.com/java-json-tools/btf) * jackson-coreutils (com.github.java-json-tools:jackson-coreutils:2.0 - https://github.com/java-json-tools/jackson-coreutils) * 
jackson-coreutils-equivalence (com.github.java-json-tools:jackson-coreutils-equivalence:1.0 - https://github.com/java-json-tools/jackson-coreutils) @@ -50,41 +51,40 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * json-schema-validator (com.github.java-json-tools:json-schema-validator:2.2.14 - https://github.com/java-json-tools/json-schema-validator) * msg-simple (com.github.java-json-tools:msg-simple:1.2 - https://github.com/java-json-tools/msg-simple) * uri-template (com.github.java-json-tools:uri-template:0.10 - https://github.com/java-json-tools/uri-template) - * Open JSON (com.github.openjson:openjson:1.0.12 - https://github.com/openjson/openjson) * JCIP Annotations under Apache License (com.github.stephenc.jcip:jcip-annotations:1.0-1 - http://stephenc.github.com/jcip-annotations) * Google APIs Client Library for Java (com.google.api-client:google-api-client:1.23.0 - https://github.com/google/google-api-java-client/google-api-client) * Google Analytics API v3-rev145-1.23.0 (com.google.apis:google-api-services-analytics:v3-rev145-1.23.0 - http://nexus.sonatype.org/oss-repository-hosting.html/google-api-services-analytics) * FindBugs-jsr305 (com.google.code.findbugs:jsr305:3.0.1 - http://findbugs.sourceforge.net/) - * Gson (com.google.code.gson:gson:2.8.6 - https://github.com/google/gson/gson) - * error-prone annotations (com.google.errorprone:error_prone_annotations:2.3.4 - http://nexus.sonatype.org/oss-repository-hosting.html/error_prone_parent/error_prone_annotations) + * Gson (com.google.code.gson:gson:2.9.0 - https://github.com/google/gson/gson) + * error-prone annotations (com.google.errorprone:error_prone_annotations:2.7.1 - http://nexus.sonatype.org/oss-repository-hosting.html/error_prone_parent/error_prone_annotations) * Guava InternalFutureFailureAccess and InternalFutures (com.google.guava:failureaccess:1.0.1 - https://github.com/google/guava/failureaccess) - * Guava: Google Core Libraries for Java 
(com.google.guava:guava:30.0-jre - https://github.com/google/guava/guava) + * Guava: Google Core Libraries for Java (com.google.guava:guava:31.0.1-jre - https://github.com/google/guava) * Guava: Google Core Libraries for Java (JDK5 Backport) (com.google.guava:guava-jdk5:17.0 - http://code.google.com/p/guava-libraries/guava-jdk5) * Guava ListenableFuture only (com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava - https://github.com/google/guava/listenablefuture) * Google HTTP Client Library for Java (com.google.http-client:google-http-client:1.23.0 - https://github.com/google/google-http-java-client/google-http-client) + * GSON extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-gson:1.41.7 - https://github.com/googleapis/google-http-java-client/google-http-client-gson) * Jackson 2 extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-jackson2:1.23.0 - https://github.com/google/google-http-java-client/google-http-client-jackson2) * J2ObjC Annotations (com.google.j2objc:j2objc-annotations:1.3 - https://github.com/google/j2objc/) - * Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.32.1 - https://github.com/googleapis/google-oauth-java-client/google-oauth-client) + * Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.33.3 - https://github.com/googleapis/google-oauth-java-client/google-oauth-client) * ConcurrentLinkedHashMap (com.googlecode.concurrentlinkedhashmap:concurrentlinkedhashmap-lru:1.4.2 - http://code.google.com/p/concurrentlinkedhashmap) - * JSON.simple (com.googlecode.json-simple:json-simple:1.1.1 - http://code.google.com/p/json-simple/) * libphonenumber (com.googlecode.libphonenumber:libphonenumber:8.11.1 - https://github.com/google/libphonenumber/) - * Jackcess (com.healthmarketscience.jackcess:jackcess:3.0.1 - https://jackcess.sourceforge.io) - * Jackcess Encrypt 
(com.healthmarketscience.jackcess:jackcess-encrypt:3.0.0 - http://jackcessencrypt.sf.net) - * project ':json-path' (com.jayway.jsonpath:json-path:2.4.0 - https://github.com/jayway/JsonPath) - * project ':json-path-assert' (com.jayway.jsonpath:json-path-assert:2.4.0 - https://github.com/jayway/JsonPath) + * Jackcess (com.healthmarketscience.jackcess:jackcess:4.0.1 - https://jackcess.sourceforge.io) + * Jackcess Encrypt (com.healthmarketscience.jackcess:jackcess-encrypt:4.0.1 - http://jackcessencrypt.sf.net) + * project ':json-path' (com.jayway.jsonpath:json-path:2.6.0 - https://github.com/jayway/JsonPath) + * project ':json-path-assert' (com.jayway.jsonpath:json-path-assert:2.6.0 - https://github.com/jayway/JsonPath) * Disruptor Framework (com.lmax:disruptor:3.4.2 - http://lmax-exchange.github.com/disruptor) * builder-commons (com.lyncode:builder-commons:1.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/builder-commons) * MaxMind DB Reader (com.maxmind.db:maxmind-db:1.2.2 - http://dev.maxmind.com/) * MaxMind GeoIP2 API (com.maxmind.geoip2:geoip2:2.11.0 - http://dev.maxmind.com/geoip/geoip2/web-services) * Nimbus JOSE+JWT (com.nimbusds:nimbus-jose-jwt:7.9 - https://bitbucket.org/connect2id/nimbus-jose-jwt) - * opencsv (com.opencsv:opencsv:5.2 - http://opencsv.sf.net) + * opencsv (com.opencsv:opencsv:5.6 - http://opencsv.sf.net) * java-libpst (com.pff:java-libpst:0.9.3 - https://github.com/rjohnsondev/java-libpst) - * rome (com.rometools:rome:1.12.2 - http://rometools.com/rome) - * rome-utils (com.rometools:rome-utils:1.12.2 - http://rometools.com/rome-utils) + * rome (com.rometools:rome:1.18.0 - http://rometools.com/rome) + * rome-modules (com.rometools:rome-modules:1.18.0 - http://rometools.com/rome-modules) + * rome-utils (com.rometools:rome-utils:1.18.0 - http://rometools.com/rome-utils) * fastinfoset (com.sun.xml.fastinfoset:FastInfoset:1.2.15 - http://fi.java.net) * T-Digest (com.tdunning:t-digest:3.1 - https://github.com/tdunning/t-digest) * JSON 
library from Android SDK (com.vaadin.external.google:android-json:0.0.20131108.vaadin1 - http://developer.android.com/sdk) - * HikariCP (com.zaxxer:HikariCP-java7:2.4.13 - https://github.com/brettwooldridge/HikariCP) * SparseBitSet (com.zaxxer:SparseBitSet:1.2 - https://github.com/brettwooldridge/SparseBitSet) * Apache Commons BeanUtils (commons-beanutils:commons-beanutils:1.9.4 - https://commons.apache.org/proper/commons-beanutils/) * Apache Commons CLI (commons-cli:commons-cli:1.4 - http://commons.apache.org/proper/commons-cli/) @@ -98,30 +98,24 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Apache Commons Validator (commons-validator:commons-validator:1.5.0 - http://commons.apache.org/proper/commons-validator/) * GeoJson POJOs for Jackson (de.grundid.opendatalab:geojson-jackson:1.14 - https://github.com/opendatalab-de/geojson-jackson) * Boilerpipe -- Boilerplate Removal and Fulltext Extraction from HTML pages (de.l3s.boilerpipe:boilerpipe:1.1.0 - http://code.google.com/p/boilerpipe/) - * SentimentAnalysisParser (edu.usc.ir:sentiment-analysis-parser:0.1 - https://github.com/USCDataScience/SentimentAnalysisParser) * OpenAIRE Funders Model (eu.openaire:funders-model:2.0.0 - https://api.openaire.eu) * Metrics Core (io.dropwizard.metrics:metrics-core:4.1.5 - https://metrics.dropwizard.io/metrics-core) * Graphite Integration for Metrics (io.dropwizard.metrics:metrics-graphite:4.1.5 - https://metrics.dropwizard.io/metrics-graphite) * Metrics Integration for Jetty 9.3 and higher (io.dropwizard.metrics:metrics-jetty9:4.1.5 - https://metrics.dropwizard.io/metrics-jetty9) * Metrics Integration with JMX (io.dropwizard.metrics:metrics-jmx:4.1.5 - https://metrics.dropwizard.io/metrics-jmx) * JVM Integration for Metrics (io.dropwizard.metrics:metrics-jvm:4.1.5 - https://metrics.dropwizard.io/metrics-jvm) - * Netty (io.netty:netty:3.10.6.Final - http://netty.io/) - * Netty/Buffer (io.netty:netty-buffer:4.1.50.Final - https://netty.io/netty-buffer/) + 
* micrometer-core (io.micrometer:micrometer-core:1.8.6 - https://github.com/micrometer-metrics/micrometer) * Netty/Buffer (io.netty:netty-buffer:4.1.68.Final - https://netty.io/netty-buffer/) - * Netty/Codec (io.netty:netty-codec:4.1.50.Final - https://netty.io/netty-codec/) * Netty/Codec (io.netty:netty-codec:4.1.68.Final - https://netty.io/netty-codec/) * Netty/Codec/HTTP (io.netty:netty-codec-http:4.1.53.Final - https://netty.io/netty-codec-http/) * Netty/Codec/Socks (io.netty:netty-codec-socks:4.1.53.Final - https://netty.io/netty-codec-socks/) - * Netty/Common (io.netty:netty-common:4.1.50.Final - https://netty.io/netty-common/) * Netty/Common (io.netty:netty-common:4.1.68.Final - https://netty.io/netty-common/) - * Netty/Handler (io.netty:netty-handler:4.1.50.Final - https://netty.io/netty-handler/) * Netty/Handler (io.netty:netty-handler:4.1.68.Final - https://netty.io/netty-handler/) * Netty/Handler/Proxy (io.netty:netty-handler-proxy:4.1.53.Final - https://netty.io/netty-handler-proxy/) - * Netty/Resolver (io.netty:netty-resolver:4.1.50.Final - https://netty.io/netty-resolver/) - * Netty/Transport (io.netty:netty-transport:4.1.50.Final - https://netty.io/netty-transport/) + * Netty/Resolver (io.netty:netty-resolver:4.1.68.Final - https://netty.io/netty-resolver/) * Netty/Transport (io.netty:netty-transport:4.1.68.Final - https://netty.io/netty-transport/) - * Netty/Transport/Native/Epoll (io.netty:netty-transport-native-epoll:4.1.50.Final - https://netty.io/netty-transport-native-epoll/) - * Netty/Transport/Native/Unix/Common (io.netty:netty-transport-native-unix-common:4.1.50.Final - https://netty.io/netty-transport-native-unix-common/) + * Netty/Transport/Native/Epoll (io.netty:netty-transport-native-epoll:4.1.68.Final - https://netty.io/netty-transport-native-epoll/) + * Netty/Transport/Native/Unix/Common (io.netty:netty-transport-native-unix-common:4.1.68.Final - https://netty.io/netty-transport-native-unix-common/) * OpenTracing API 
(io.opentracing:opentracing-api:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-api) * OpenTracing-noop (io.opentracing:opentracing-noop:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-noop) * OpenTracing-util (io.opentracing:opentracing-util:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-util) @@ -147,53 +141,44 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Byte Buddy (without dependencies) (net.bytebuddy:byte-buddy:1.11.13 - https://bytebuddy.net/byte-buddy) * Byte Buddy agent (net.bytebuddy:byte-buddy-agent:1.11.13 - https://bytebuddy.net/byte-buddy-agent) * eigenbase-properties (net.hydromatic:eigenbase-properties:1.1.5 - http://github.com/julianhyde/eigenbase-properties) - * Java Native Access (net.java.dev.jna:jna:5.5.0 - https://github.com/java-native-access/jna) * json-unit-core (net.javacrumbs.json-unit:json-unit-core:2.19.0 - https://github.com/lukas-krecan/JsonUnit/json-unit-core) * "Java Concurrency in Practice" book annotations (net.jcip:jcip-annotations:1.0 - http://jcip.net/) * ASM based accessors helper used by json-smart (net.minidev:accessors-smart:1.2 - http://www.minidev.net/) + * ASM based accessors helper used by json-smart (net.minidev:accessors-smart:2.4.7 - https://urielch.github.io/) * JSON Small and Fast Parser (net.minidev:json-smart:2.3 - http://www.minidev.net/) - * ehcache (net.sf.ehcache:ehcache:2.10.6 - http://ehcache.org) - * Ehcache Core (net.sf.ehcache:ehcache-core:2.6.11 - http://ehcache.org) + * JSON Small and Fast Parser (net.minidev:json-smart:2.4.7 - https://urielch.github.io/) * Abdera Core (org.apache.abdera:abdera-core:1.1.3 - http://abdera.apache.org/abdera-core) * I18N Libraries (org.apache.abdera:abdera-i18n:1.1.3 - http://abdera.apache.org) * Apache Ant Core (org.apache.ant:ant:1.10.11 - https://ant.apache.org/) * Apache Ant Launcher (org.apache.ant:ant-launcher:1.10.11 - https://ant.apache.org/) * Apache Commons BCEL 
(org.apache.bcel:bcel:6.4.0 - https://commons.apache.org/proper/commons-bcel) - * Calcite Core (org.apache.calcite:calcite-core:1.18.0 - https://calcite.apache.org/calcite-core) - * Calcite Linq4j (org.apache.calcite:calcite-linq4j:1.18.0 - https://calcite.apache.org/calcite-linq4j) - * Apache Calcite Avatica (org.apache.calcite.avatica:avatica-core:1.13.0 - https://calcite.apache.org/avatica/avatica-core) + * Calcite Core (org.apache.calcite:calcite-core:1.27.0 - https://calcite.apache.org) + * Calcite Linq4j (org.apache.calcite:calcite-linq4j:1.27.0 - https://calcite.apache.org) + * Apache Calcite Avatica (org.apache.calcite.avatica:avatica-core:1.18.0 - https://calcite.apache.org/avatica) * Apache Commons Collections (org.apache.commons:commons-collections4:4.1 - http://commons.apache.org/proper/commons-collections/) - * Apache Commons Compress (org.apache.commons:commons-compress:1.20 - https://commons.apache.org/proper/commons-compress/) - * Apache Commons Configuration (org.apache.commons:commons-configuration2:2.7 - https://commons.apache.org/proper/commons-configuration/) - * Apache Commons CSV (org.apache.commons:commons-csv:1.8 - https://commons.apache.org/proper/commons-csv/) + * Apache Commons Compress (org.apache.commons:commons-compress:1.21 - https://commons.apache.org/proper/commons-compress/) + * Apache Commons Configuration (org.apache.commons:commons-configuration2:2.8.0 - https://commons.apache.org/proper/commons-configuration/) + * Apache Commons CSV (org.apache.commons:commons-csv:1.9.0 - https://commons.apache.org/proper/commons-csv/) * Apache Commons DBCP (org.apache.commons:commons-dbcp2:2.8.0 - https://commons.apache.org/dbcp/) * Apache Commons Exec (org.apache.commons:commons-exec:1.3 - http://commons.apache.org/proper/commons-exec/) - * Apache Commons Lang (org.apache.commons:commons-lang3:3.7 - http://commons.apache.org/proper/commons-lang/) + * Apache Commons Lang (org.apache.commons:commons-lang3:3.12.0 - 
https://commons.apache.org/proper/commons-lang/) * Apache Commons Math (org.apache.commons:commons-math3:3.6.1 - http://commons.apache.org/proper/commons-math/) * Apache Commons Pool (org.apache.commons:commons-pool2:2.9.0 - https://commons.apache.org/proper/commons-pool/) - * Apache Commons Text (org.apache.commons:commons-text:1.8 - https://commons.apache.org/proper/commons-text) * Apache Commons Text (org.apache.commons:commons-text:1.9 - https://commons.apache.org/proper/commons-text) * Curator Client (org.apache.curator:curator-client:2.13.0 - http://curator.apache.org/curator-client) * Curator Framework (org.apache.curator:curator-framework:2.13.0 - http://curator.apache.org/curator-framework) * Curator Recipes (org.apache.curator:curator-recipes:2.13.0 - http://curator.apache.org/curator-recipes) - * Apache CXF Core (org.apache.cxf:cxf-core:3.3.6 - https://cxf.apache.org) - * Apache CXF Runtime JAX-RS Frontend (org.apache.cxf:cxf-rt-frontend-jaxrs:3.3.6 - https://cxf.apache.org) - * Apache CXF JAX-RS Client (org.apache.cxf:cxf-rt-rs-client:3.3.6 - https://cxf.apache.org) - * Apache CXF Runtime Security functionality (org.apache.cxf:cxf-rt-security:3.3.6 - https://cxf.apache.org) - * Apache CXF Runtime HTTP Transport (org.apache.cxf:cxf-rt-transports-http:3.3.6 - https://cxf.apache.org) - * JTA 1.1 (org.apache.geronimo.specs:geronimo-jta_1.1_spec:1.1.1 - http://geronimo.apache.org/specs/geronimo-jta_1.1_spec) - * Web Services Metadata 2.0 (org.apache.geronimo.specs:geronimo-ws-metadata_2.0_spec:1.1.3 - http://geronimo.apache.org/maven/specs/geronimo-ws-metadata_2.0_spec/1.1.3) - * Apache Hadoop Annotations (org.apache.hadoop:hadoop-annotations:3.2.0 - no url defined) - * Apache Hadoop Auth (org.apache.hadoop:hadoop-auth:3.2.0 - no url defined) - * Apache Hadoop Common (org.apache.hadoop:hadoop-common:3.2.0 - no url defined) - * Apache Hadoop HDFS Client (org.apache.hadoop:hadoop-hdfs-client:3.2.0 - no url defined) + * Apache Hadoop Annotations 
(org.apache.hadoop:hadoop-annotations:3.2.2 - no url defined) + * Apache Hadoop Auth (org.apache.hadoop:hadoop-auth:3.2.2 - no url defined) + * Apache Hadoop Common (org.apache.hadoop:hadoop-common:3.2.2 - no url defined) + * Apache Hadoop HDFS Client (org.apache.hadoop:hadoop-hdfs-client:3.2.2 - no url defined) * htrace-core4 (org.apache.htrace:htrace-core4:4.1.0-incubating - http://incubator.apache.org/projects/htrace.html) * Apache HttpClient (org.apache.httpcomponents:httpclient:4.5.13 - http://hc.apache.org/httpcomponents-client) * Apache HttpClient Cache (org.apache.httpcomponents:httpclient-cache:4.2.6 - http://hc.apache.org/httpcomponents-client) - * Apache HttpCore (org.apache.httpcomponents:httpcore:4.4.4 - http://hc.apache.org/httpcomponents-core-ga) - * Apache HttpClient Mime (org.apache.httpcomponents:httpmime:4.5.12 - http://hc.apache.org/httpcomponents-client) - * Apache James :: Mime4j :: Core (org.apache.james:apache-mime4j-core:0.8.3 - http://james.apache.org/mime4j/apache-mime4j-core) - * Apache James :: Mime4j :: DOM (org.apache.james:apache-mime4j-dom:0.8.3 - http://james.apache.org/mime4j/apache-mime4j-dom) + * Apache HttpCore (org.apache.httpcomponents:httpcore:4.4.15 - http://hc.apache.org/httpcomponents-core-ga) + * Apache HttpClient Mime (org.apache.httpcomponents:httpmime:4.5.13 - http://hc.apache.org/httpcomponents-client) + * Apache James :: Mime4j :: Core (org.apache.james:apache-mime4j-core:0.8.4 - http://james.apache.org/mime4j/apache-mime4j-core) + * Apache James :: Mime4j :: DOM (org.apache.james:apache-mime4j-dom:0.8.4 - http://james.apache.org/mime4j/apache-mime4j-dom) * Apache Jena - Libraries POM (org.apache.jena:apache-jena-libs:2.13.0 - http://jena.apache.org/apache-jena-libs/) * Apache Jena - ARQ (SPARQL 1.1 Query Engine) (org.apache.jena:jena-arq:2.13.0 - http://jena.apache.org/jena-arq/) * Apache Jena - Core (org.apache.jena:jena-core:2.13.0 - http://jena.apache.org/jena-core/) @@ -207,121 +192,135 @@ 
https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Apache Log4j API (org.apache.logging.log4j:log4j-api:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-api/) * Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-core/) * Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-jul/) + * Apache Log4j Layout for JSON template (org.apache.logging.log4j:log4j-layout-template-json:2.16.0 - https://logging.apache.org/log4j/2.x/log4j-layout-template-json/) * Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/) * Apache Log4j Web (org.apache.logging.log4j:log4j-web:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-web/) - * Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-common) - * Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-icu) - * Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji) - * Lucene Nori Korean Morphological Analyzer (org.apache.lucene:lucene-analyzers-nori:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-nori) - * Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic) - * Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn) - * Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-stempel) - * Lucene Memory (org.apache.lucene:lucene-backward-codecs:8.8.1 - 
https://lucene.apache.org/lucene-parent/lucene-backward-codecs) - * Lucene Classification (org.apache.lucene:lucene-classification:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-classification) - * Lucene codecs (org.apache.lucene:lucene-codecs:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-codecs) - * Lucene Core (org.apache.lucene:lucene-core:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-core) - * Lucene Expressions (org.apache.lucene:lucene-expressions:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-expressions) - * Lucene Grouping (org.apache.lucene:lucene-grouping:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-grouping) - * Lucene Highlighter (org.apache.lucene:lucene-highlighter:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-highlighter) - * Lucene Join (org.apache.lucene:lucene-join:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-join) - * Lucene Memory (org.apache.lucene:lucene-memory:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-memory) - * Lucene Miscellaneous (org.apache.lucene:lucene-misc:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-misc) - * Lucene Queries (org.apache.lucene:lucene-queries:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-queries) - * Lucene QueryParsers (org.apache.lucene:lucene-queryparser:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-queryparser) - * Lucene Sandbox (org.apache.lucene:lucene-sandbox:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-sandbox) - * Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras) - * Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-spatial3d) - * Lucene Suggest (org.apache.lucene:lucene-suggest:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-suggest) - * Apache OpenNLP Tools (org.apache.opennlp:opennlp-tools:1.9.2 - https://www.apache.org/opennlp/opennlp-tools/) + 
* Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-common) + * Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-icu) + * Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji) + * Lucene Nori Korean Morphological Analyzer (org.apache.lucene:lucene-analyzers-nori:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-nori) + * Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic) + * Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn) + * Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-stempel) + * Lucene Memory (org.apache.lucene:lucene-backward-codecs:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-backward-codecs) + * Lucene Classification (org.apache.lucene:lucene-classification:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-classification) + * Lucene codecs (org.apache.lucene:lucene-codecs:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-codecs) + * Lucene Core (org.apache.lucene:lucene-core:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-core) + * Lucene Expressions (org.apache.lucene:lucene-expressions:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-expressions) + * Lucene Grouping (org.apache.lucene:lucene-grouping:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-grouping) + * Lucene Highlighter (org.apache.lucene:lucene-highlighter:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-highlighter) + * Lucene Join 
(org.apache.lucene:lucene-join:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-join) + * Lucene Memory (org.apache.lucene:lucene-memory:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-memory) + * Lucene Miscellaneous (org.apache.lucene:lucene-misc:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-misc) + * Lucene Queries (org.apache.lucene:lucene-queries:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-queries) + * Lucene QueryParsers (org.apache.lucene:lucene-queryparser:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-queryparser) + * Lucene Sandbox (org.apache.lucene:lucene-sandbox:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-sandbox) + * Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras) + * Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-spatial3d) + * Lucene Suggest (org.apache.lucene:lucene-suggest:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-suggest) * Apache FontBox (org.apache.pdfbox:fontbox:2.0.24 - http://pdfbox.apache.org/) * PDFBox JBIG2 ImageIO plugin (org.apache.pdfbox:jbig2-imageio:3.0.3 - https://www.apache.org/jbig2-imageio/) * Apache JempBox (org.apache.pdfbox:jempbox:1.8.16 - http://www.apache.org/pdfbox-parent/jempbox/) * Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.24 - https://www.apache.org/pdfbox-parent/pdfbox/) - * Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.19 - https://www.apache.org/pdfbox-parent/pdfbox-tools/) - * Apache Preflight (org.apache.pdfbox:preflight:2.0.19 - https://www.apache.org/pdfbox-parent/preflight/) - * Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.19 - https://www.apache.org/pdfbox-parent/xmpbox/) - * Apache POI (org.apache.poi:poi:3.17 - http://poi.apache.org/) - * Apache POI (org.apache.poi:poi-ooxml:3.17 - http://poi.apache.org/) - * Apache POI (org.apache.poi:poi-ooxml-schemas:3.17 - 
http://poi.apache.org/) - * Apache POI (org.apache.poi:poi-scratchpad:3.17 - http://poi.apache.org/) - * Apache SIS features (org.apache.sis.core:sis-feature:1.0 - http://sis.apache.org/core/sis-feature) - * Apache SIS metadata (org.apache.sis.core:sis-metadata:1.0 - http://sis.apache.org/core/sis-metadata) - * Apache SIS referencing (org.apache.sis.core:sis-referencing:1.0 - http://sis.apache.org/core/sis-referencing) - * Apache SIS utilities (org.apache.sis.core:sis-utility:1.0 - http://sis.apache.org/core/sis-utility) - * Apache SIS netCDF storage (org.apache.sis.storage:sis-netcdf:1.0 - http://sis.apache.org/storage/sis-netcdf) - * Apache SIS common storage (org.apache.sis.storage:sis-storage:1.0 - http://sis.apache.org/storage/sis-storage) - * Apache Solr Content Extraction Library (org.apache.solr:solr-cell:8.8.1 - https://lucene.apache.org/solr-parent/solr-cell) - * Apache Solr Core (org.apache.solr:solr-core:8.8.1 - https://lucene.apache.org/solr-parent/solr-core) - * Apache Solr Solrj (org.apache.solr:solr-solrj:8.8.1 - https://lucene.apache.org/solr-parent/solr-solrj) + * Apache PDFBox Debugger (org.apache.pdfbox:pdfbox-debugger:2.0.25 - https://www.apache.org/pdfbox-parent/pdfbox-debugger/) + * Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.25 - https://www.apache.org/pdfbox-parent/pdfbox-tools/) + * Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.25 - https://www.apache.org/pdfbox-parent/xmpbox/) + * Apache POI - Common (org.apache.poi:poi:5.2.0 - https://poi.apache.org/) + * Apache POI - API based on OPC and OOXML schemas (org.apache.poi:poi-ooxml:5.2.0 - https://poi.apache.org/) + * Apache POI (org.apache.poi:poi-ooxml-lite:5.2.0 - https://poi.apache.org/) + * Apache POI (org.apache.poi:poi-scratchpad:5.2.0 - https://poi.apache.org/) + * Apache Solr Core (org.apache.solr:solr-core:8.11.1 - https://lucene.apache.org/solr-parent/solr-core) + * Apache Solr Solrj (org.apache.solr:solr-solrj:8.11.1 - https://lucene.apache.org/solr-parent/solr-solrj) * 
Apache Standard Taglib Implementation (org.apache.taglibs:taglibs-standard-impl:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-impl) * Apache Standard Taglib Specification API (org.apache.taglibs:taglibs-standard-spec:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-spec) * Apache Thrift (org.apache.thrift:libthrift:0.9.2 - http://thrift.apache.org) - * Apache Tika core (org.apache.tika:tika-core:1.24.1 - http://tika.apache.org/) - * Apache Tika Java-7 Components (org.apache.tika:tika-java7:1.24.1 - http://tika.apache.org/) - * Apache Tika parsers (org.apache.tika:tika-parsers:1.24.1 - http://tika.apache.org/) - * Apache Tika XMP (org.apache.tika:tika-xmp:1.24.1 - http://tika.apache.org/) - * tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.33 - https://tomcat.apache.org/) - * tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.33 - https://tomcat.apache.org/) - * tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.33 - https://tomcat.apache.org/) - * Apache Velocity - Engine (org.apache.velocity:velocity-engine-core:2.2 - http://velocity.apache.org/engine/devel/velocity-engine-core/) + * Apache Tika core (org.apache.tika:tika-core:2.3.0 - https://tika.apache.org/) + * Apache Tika Apple parser module (org.apache.tika:tika-parser-apple-module:2.3.0 - https://tika.apache.org/tika-parser-apple-module/) + * Apache Tika audiovideo parser module (org.apache.tika:tika-parser-audiovideo-module:2.3.0 - https://tika.apache.org/tika-parser-audiovideo-module/) + * Apache Tika cad parser module (org.apache.tika:tika-parser-cad-module:2.3.0 - https://tika.apache.org/tika-parser-cad-module/) + * Apache Tika code parser module (org.apache.tika:tika-parser-code-module:2.3.0 - https://tika.apache.org/tika-parser-code-module/) + * Apache Tika crypto parser module (org.apache.tika:tika-parser-crypto-module:2.3.0 - https://tika.apache.org/tika-parser-crypto-module/) + * Apache Tika digest 
commons (org.apache.tika:tika-parser-digest-commons:2.3.0 - https://tika.apache.org/tika-parser-digest-commons/) + * Apache Tika font parser module (org.apache.tika:tika-parser-font-module:2.3.0 - https://tika.apache.org/tika-parser-font-module/) + * Apache Tika html commons (org.apache.tika:tika-parser-html-commons:2.3.0 - https://tika.apache.org/tika-parser-html-commons/) + * Apache Tika html parser module (org.apache.tika:tika-parser-html-module:2.3.0 - https://tika.apache.org/tika-parser-html-module/) + * Apache Tika image parser module (org.apache.tika:tika-parser-image-module:2.3.0 - https://tika.apache.org/tika-parser-image-module/) + * Apache Tika mail commons (org.apache.tika:tika-parser-mail-commons:2.3.0 - https://tika.apache.org/tika-parser-mail-commons/) + * Apache Tika mail parser module (org.apache.tika:tika-parser-mail-module:2.3.0 - https://tika.apache.org/tika-parser-mail-module/) + * Apache Tika Microsoft parser module (org.apache.tika:tika-parser-microsoft-module:2.3.0 - https://tika.apache.org/tika-parser-microsoft-module/) + * Apache Tika miscellaneous office format parser module (org.apache.tika:tika-parser-miscoffice-module:2.3.0 - https://tika.apache.org/tika-parser-miscoffice-module/) + * Apache Tika news parser module (org.apache.tika:tika-parser-news-module:2.3.0 - https://tika.apache.org/tika-parser-news-module/) + * Apache Tika OCR parser module (org.apache.tika:tika-parser-ocr-module:2.3.0 - https://tika.apache.org/tika-parser-ocr-module/) + * Apache Tika PDF parser module (org.apache.tika:tika-parser-pdf-module:2.3.0 - https://tika.apache.org/tika-parser-pdf-module/) + * Apache Tika package parser module (org.apache.tika:tika-parser-pkg-module:2.3.0 - https://tika.apache.org/tika-parser-pkg-module/) + * Apache Tika text parser module (org.apache.tika:tika-parser-text-module:2.3.0 - https://tika.apache.org/tika-parser-text-module/) + * Apache Tika XML parser module (org.apache.tika:tika-parser-xml-module:2.3.0 - 
https://tika.apache.org/tika-parser-xml-module/) + * Apache Tika XMP commons (org.apache.tika:tika-parser-xmp-commons:2.3.0 - https://tika.apache.org/tika-parser-xmp-commons/) + * Apache Tika ZIP commons (org.apache.tika:tika-parser-zip-commons:2.3.0 - https://tika.apache.org/tika-parser-zip-commons/) + * Apache Tika standard parser package (org.apache.tika:tika-parsers-standard-package:2.3.0 - https://tika.apache.org/tika-parsers/tika-parsers-standard/tika-parsers-standard-package/) + * tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.63 - https://tomcat.apache.org/) + * tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.63 - https://tomcat.apache.org/) + * tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.63 - https://tomcat.apache.org/) + * Apache Velocity - Engine (org.apache.velocity:velocity-engine-core:2.3 - http://velocity.apache.org/engine/devel/velocity-engine-core/) * Apache Velocity - JSR 223 Scripting (org.apache.velocity:velocity-engine-scripting:2.2 - http://velocity.apache.org/engine/devel/velocity-engine-scripting/) * Axiom API (org.apache.ws.commons.axiom:axiom-api:1.2.22 - http://ws.apache.org/axiom/) - * LLOM (org.apache.ws.commons.axiom:axiom-impl:1.2.22 - http://ws.apache.org/axiom/implementations/axiom-impl/) * Abdera Model (FOM) Implementation (org.apache.ws.commons.axiom:fom-impl:1.2.22 - http://ws.apache.org/axiom/implementations/fom-impl/) - * XmlSchema Core (org.apache.ws.xmlschema:xmlschema-core:2.2.5 - https://ws.apache.org/commons/xmlschema20/xmlschema-core/) - * XmlBeans (org.apache.xmlbeans:xmlbeans:3.1.0 - https://xmlbeans.apache.org/) - * zookeeper (org.apache.zookeeper:zookeeper:3.4.14 - no url defined) + * XmlBeans (org.apache.xmlbeans:xmlbeans:5.0.3 - https://xmlbeans.apache.org/) + * Apache ZooKeeper - Server (org.apache.zookeeper:zookeeper:3.6.2 - http://zookeeper.apache.org/zookeeper) * Apache ZooKeeper - Jute (org.apache.zookeeper:zookeeper-jute:3.6.2 - 
http://zookeeper.apache.org/zookeeper-jute) - * AssertJ fluent assertions (org.assertj:assertj-core:3.13.2 - http://assertj.org/assertj-core) - * Evo Inflector (org.atteo:evo-inflector:1.2.2 - http://atteo.org/static/evo-inflector) + * org.apiguardian:apiguardian-api (org.apiguardian:apiguardian-api:1.1.0 - https://github.com/apiguardian-team/apiguardian) + * AssertJ fluent assertions (org.assertj:assertj-core:3.21.0 - https://assertj.github.io/doc/assertj-core/) + * Evo Inflector (org.atteo:evo-inflector:1.3 - http://atteo.org/static/evo-inflector) * jose4j (org.bitbucket.b_c:jose4j:0.6.5 - https://bitbucket.org/b_c/jose4j/) * TagSoup (org.ccil.cowan.tagsoup:tagsoup:1.2.1 - http://home.ccil.org/~cowan/XML/tagsoup/) - * Woodstox (org.codehaus.woodstox:woodstox-core-asl:4.4.1 - http://woodstox.codehaus.org) * jems (org.dmfs:jems:1.18 - https://github.com/dmfs/jems) * rfc3986-uri (org.dmfs:rfc3986-uri:0.8.1 - https://github.com/dmfs/uri-toolkit) * Jetty :: Apache JSP Implementation (org.eclipse.jetty:apache-jsp:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/) - * Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client) - * Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client) - * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) - * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) + * Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client) + * Jetty :: 
ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client) + * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) + * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) + * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) * Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty) - * Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-client) - * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-continuation) - * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-deploy) - * Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-http) - * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-io) - * Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-jmx) + * Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-client) + * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-continuation) + * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-continuation) + * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-deploy) + * Jetty :: Http Utility 
(org.eclipse.jetty:jetty-http:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-http) + * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-io) + * Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-jmx) * Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty) - * Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-rewrite) - * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-security) - * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-server) - * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-servlet) - * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-servlets) - * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-util) - * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-webapp) - * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-xml) - * Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-client) - * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-common) - * Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-hpack) - * Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.34.v20201102 
- https://eclipse.org/jetty/http2-parent/http2-http-client-transport) - * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-server) + * Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-rewrite) + * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-security) + * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-security) + * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-server) + * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlet) + * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlets) + * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util) + * Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util-ajax) + * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-webapp) + * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-xml) + * Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-client) + * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-common) + * Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-hpack) + * Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.44.v20210927 - 
https://eclipse.org/jetty/http2-parent/http2-http-client-transport) + * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-server) * Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas) * Ehcache (org.ehcache:ehcache:3.4.0 - http://ehcache.org) - * flyway-core (org.flywaydb:flyway-core:6.5.7 - https://flywaydb.org/flyway-core) + * flyway-core (org.flywaydb:flyway-core:8.4.4 - https://flywaydb.org/flyway-core) * Ogg and Vorbis for Java, Core (org.gagravarr:vorbis-java-core:0.8 - https://github.com/Gagravarr/VorbisJava) * Apache Tika plugin for Ogg, Vorbis and FLAC (org.gagravarr:vorbis-java-tika:0.8 - https://github.com/Gagravarr/VorbisJava) - * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) - * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) - * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) - * Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.0.18.Final - http://hibernate.org/validator/hibernate-validator) - * Hibernate Validator Portable Extension (org.hibernate.validator:hibernate-validator-cdi:6.0.18.Final - http://hibernate.org/validator/hibernate-validator-cdi) + * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) + * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) + * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) + * Hibernate Validator Engine 
(org.hibernate.validator:hibernate-validator:6.0.23.Final - http://hibernate.org/validator/hibernate-validator) + * Hibernate Validator Portable Extension (org.hibernate.validator:hibernate-validator-cdi:6.0.23.Final - http://hibernate.org/validator/hibernate-validator-cdi) * Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/) - * Java Annotation Indexer (org.jboss:jandex:2.1.1.Final - http://www.jboss.org/jandex) - * JBoss Logging 3 (org.jboss.logging:jboss-logging:3.3.2.Final - http://www.jboss.org) - * JDOM (org.jdom:jdom:1.1.3 - http://www.jdom.org) - * JDOM (org.jdom:jdom2:2.0.6 - http://www.jdom.org) + * Java Annotation Indexer (org.jboss:jandex:2.4.2.Final - http://www.jboss.org/jandex) + * JBoss Logging 3 (org.jboss.logging:jboss-logging:3.4.3.Final - http://www.jboss.org) + * JDOM (org.jdom:jdom2:2.0.6.1 - http://www.jdom.org) * jtwig-core (org.jtwig:jtwig-core:5.87.0.RELEASE - http://jtwig.org) * jtwig-reflection (org.jtwig:jtwig-reflection:5.87.0.RELEASE - http://jtwig.org) * jtwig-spring (org.jtwig:jtwig-spring:5.87.0.RELEASE - http://jtwig.org) @@ -341,69 +340,67 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Objenesis (org.objenesis:objenesis:3.2 - http://objenesis.org/objenesis) * parboiled-core (org.parboiled:parboiled-core:1.3.1 - http://parboiled.org) * parboiled-java (org.parboiled:parboiled-java:1.3.1 - http://parboiled.org) - * quartz (org.quartz-scheduler:quartz:2.3.2 - http://www.quartz-scheduler.org/quartz) - * rome-modules (org.rometools:rome-modules:1.0 - http://www.rometools.org) * RRD4J (org.rrd4j:rrd4j:3.5 - https://github.com/rrd4j/rrd4j/) * JSONassert (org.skyscreamer:jsonassert:1.5.0 - https://github.com/skyscreamer/JSONassert) - * Spring AOP (org.springframework:spring-aop:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Beans (org.springframework:spring-beans:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Context 
(org.springframework:spring-context:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Context Support (org.springframework:spring-context-support:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Core (org.springframework:spring-core:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Expression Language (SpEL) (org.springframework:spring-expression:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Commons Logging Bridge (org.springframework:spring-jcl:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring JDBC (org.springframework:spring-jdbc:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Object/Relational Mapping (org.springframework:spring-orm:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring TestContext Framework (org.springframework:spring-test:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Transaction (org.springframework:spring-tx:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Web (org.springframework:spring-web:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Web MVC (org.springframework:spring-webmvc:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Boot (org.springframework.boot:spring-boot:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot) - * Spring Boot AutoConfigure (org.springframework.boot:spring-boot-autoconfigure:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-autoconfigure) + * Spring AOP (org.springframework:spring-aop:5.3.20 - https://github.com/spring-projects/spring-framework) + * Spring Beans (org.springframework:spring-beans:5.3.20 - https://github.com/spring-projects/spring-framework) + * Spring Context 
(org.springframework:spring-context:5.3.20 - https://github.com/spring-projects/spring-framework) + * Spring Context Support (org.springframework:spring-context-support:5.3.20 - https://github.com/spring-projects/spring-framework) + * Spring Core (org.springframework:spring-core:5.3.20 - https://github.com/spring-projects/spring-framework) + * Spring Expression Language (SpEL) (org.springframework:spring-expression:5.3.20 - https://github.com/spring-projects/spring-framework) + * Spring Commons Logging Bridge (org.springframework:spring-jcl:5.3.20 - https://github.com/spring-projects/spring-framework) + * Spring JDBC (org.springframework:spring-jdbc:5.3.20 - https://github.com/spring-projects/spring-framework) + * Spring Object/Relational Mapping (org.springframework:spring-orm:5.3.20 - https://github.com/spring-projects/spring-framework) + * Spring TestContext Framework (org.springframework:spring-test:5.3.20 - https://github.com/spring-projects/spring-framework) + * Spring Transaction (org.springframework:spring-tx:5.3.20 - https://github.com/spring-projects/spring-framework) + * Spring Web (org.springframework:spring-web:5.3.20 - https://github.com/spring-projects/spring-framework) + * Spring Web MVC (org.springframework:spring-webmvc:5.3.20 - https://github.com/spring-projects/spring-framework) + * spring-boot (org.springframework.boot:spring-boot:2.6.8 - https://spring.io/projects/spring-boot) + * spring-boot-actuator (org.springframework.boot:spring-boot-actuator:2.6.8 - https://spring.io/projects/spring-boot) + * spring-boot-actuator-autoconfigure (org.springframework.boot:spring-boot-actuator-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot) + * spring-boot-autoconfigure (org.springframework.boot:spring-boot-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot) * Spring Boot Configuration Processor (org.springframework.boot:spring-boot-configuration-processor:2.0.0.RELEASE - 
https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-tools/spring-boot-configuration-processor) - * Spring Boot Starter (org.springframework.boot:spring-boot-starter:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter) - * Spring Boot AOP Starter (org.springframework.boot:spring-boot-starter-aop:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-aop) - * Spring Boot Cache Starter (org.springframework.boot:spring-boot-starter-cache:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-cache) - * Spring Boot Data REST Starter (org.springframework.boot:spring-boot-starter-data-rest:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-data-rest) - * Spring Boot Json Starter (org.springframework.boot:spring-boot-starter-json:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-json) - * Spring Boot Log4j 2 Starter (org.springframework.boot:spring-boot-starter-log4j2:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-log4j2) - * Spring Boot Security Starter (org.springframework.boot:spring-boot-starter-security:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-security) - * Spring Boot Test Starter (org.springframework.boot:spring-boot-starter-test:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-test) - * Spring Boot Tomcat Starter (org.springframework.boot:spring-boot-starter-tomcat:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-tomcat) - * Spring Boot Validation 
Starter (org.springframework.boot:spring-boot-starter-validation:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-validation) - * Spring Boot Web Starter (org.springframework.boot:spring-boot-starter-web:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-web) - * Spring Boot Test (org.springframework.boot:spring-boot-test:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-test) - * Spring Boot Test Auto-Configure (org.springframework.boot:spring-boot-test-autoconfigure:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-test-autoconfigure) - * Spring Data Core (org.springframework.data:spring-data-commons:2.2.6.RELEASE - https://www.spring.io/spring-data/spring-data-commons) - * Spring Data REST - Core (org.springframework.data:spring-data-rest-core:3.2.6.RELEASE - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-core) - * Spring Data REST - HAL Browser (org.springframework.data:spring-data-rest-hal-browser:3.2.6.RELEASE - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-hal-browser) - * Spring Data REST - WebMVC (org.springframework.data:spring-data-rest-webmvc:3.2.6.RELEASE - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-webmvc) - * Spring HATEOAS (org.springframework.hateoas:spring-hateoas:1.0.4.RELEASE - https://github.com/spring-projects/spring-hateoas) + * spring-boot-starter (org.springframework.boot:spring-boot-starter:2.6.8 - https://spring.io/projects/spring-boot) + * spring-boot-starter-actuator (org.springframework.boot:spring-boot-starter-actuator:2.6.8 - https://spring.io/projects/spring-boot) + * spring-boot-starter-aop (org.springframework.boot:spring-boot-starter-aop:2.6.8 - https://spring.io/projects/spring-boot) + * spring-boot-starter-cache 
(org.springframework.boot:spring-boot-starter-cache:2.6.8 - https://spring.io/projects/spring-boot) + * spring-boot-starter-data-rest (org.springframework.boot:spring-boot-starter-data-rest:2.6.8 - https://spring.io/projects/spring-boot) + * spring-boot-starter-json (org.springframework.boot:spring-boot-starter-json:2.6.8 - https://spring.io/projects/spring-boot) + * spring-boot-starter-log4j2 (org.springframework.boot:spring-boot-starter-log4j2:2.6.8 - https://spring.io/projects/spring-boot) + * spring-boot-starter-security (org.springframework.boot:spring-boot-starter-security:2.6.8 - https://spring.io/projects/spring-boot) + * spring-boot-starter-test (org.springframework.boot:spring-boot-starter-test:2.6.8 - https://spring.io/projects/spring-boot) + * spring-boot-starter-tomcat (org.springframework.boot:spring-boot-starter-tomcat:2.6.8 - https://spring.io/projects/spring-boot) + * spring-boot-starter-web (org.springframework.boot:spring-boot-starter-web:2.6.8 - https://spring.io/projects/spring-boot) + * spring-boot-test (org.springframework.boot:spring-boot-test:2.6.8 - https://spring.io/projects/spring-boot) + * spring-boot-test-autoconfigure (org.springframework.boot:spring-boot-test-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot) + * Spring Data Core (org.springframework.data:spring-data-commons:2.6.4 - https://www.spring.io/spring-data/spring-data-commons) + * Spring Data REST - Core (org.springframework.data:spring-data-rest-core:3.6.4 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-core) + * Spring Data REST - WebMVC (org.springframework.data:spring-data-rest-webmvc:3.6.4 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-webmvc) + * Spring HATEOAS (org.springframework.hateoas:spring-hateoas:1.4.2 - https://github.com/spring-projects/spring-hateoas) * Spring Plugin - Core (org.springframework.plugin:spring-plugin-core:2.0.0.RELEASE - 
https://github.com/spring-projects/spring-plugin/spring-plugin-core) - * spring-security-config (org.springframework.security:spring-security-config:5.2.2.RELEASE - http://spring.io/spring-security) - * spring-security-core (org.springframework.security:spring-security-core:5.2.2.RELEASE - http://spring.io/spring-security) - * spring-security-test (org.springframework.security:spring-security-test:5.2.2.RELEASE - http://spring.io/spring-security) - * spring-security-web (org.springframework.security:spring-security-web:5.2.2.RELEASE - http://spring.io/spring-security) + * spring-security-config (org.springframework.security:spring-security-config:5.6.5 - https://spring.io/projects/spring-security) + * spring-security-core (org.springframework.security:spring-security-core:5.6.5 - https://spring.io/projects/spring-security) + * spring-security-crypto (org.springframework.security:spring-security-crypto:5.6.5 - https://spring.io/projects/spring-security) + * spring-security-test (org.springframework.security:spring-security-test:5.6.5 - https://spring.io/projects/spring-security) + * spring-security-web (org.springframework.security:spring-security-web:5.6.5 - https://spring.io/projects/spring-security) * SWORD v2 :: Common Server Library (org.swordapp:sword2-server:1.0 - http://www.swordapp.org/) - * ISO Parser (org.tallison:isoparser:1.9.41.2 - https://github.com/tballison/mp4parser) - * org.tallison:metadata-extractor (org.tallison:metadata-extractor:2.13.0 - https://drewnoakes.com/code/exif/) - * XMPCore Shaded (org.tallison.xmp:xmpcore-shaded:6.1.10 - https://github.com/tballison) * snappy-java (org.xerial.snappy:snappy-java:1.1.7.6 - https://github.com/xerial/snappy-java) * xml-matchers (org.xmlmatchers:xml-matchers:0.10 - http://code.google.com/p/xml-matchers/) - * org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.6.4 - https://www.xmlunit.org/) * org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.0 - https://www.xmlunit.org/) + * 
org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.4 - https://www.xmlunit.org/) * org.xmlunit:xmlunit-placeholders (org.xmlunit:xmlunit-placeholders:2.8.0 - https://www.xmlunit.org/xmlunit-placeholders/) - * SnakeYAML (org.yaml:snakeyaml:1.25 - http://www.snakeyaml.org) - * SnakeYAML (org.yaml:snakeyaml:1.26 - http://www.snakeyaml.org) - * ROME, RSS and atOM utilitiEs for Java (rome:rome:1.0 - https://rome.dev.java.net/) + * SnakeYAML (org.yaml:snakeyaml:1.29 - http://www.snakeyaml.org) * software.amazon.ion:ion-java (software.amazon.ion:ion-java:1.0.2 - https://github.com/amznlabs/ion-java/) + * Xalan Java Serializer (xalan:serializer:2.7.2 - http://xml.apache.org/xalan-j/) * xalan (xalan:xalan:2.7.0 - no url defined) - * Xerces2-j (xerces:xercesImpl:2.12.0 - https://xerces.apache.org/xerces2-j/) + * Xalan Java (xalan:xalan:2.7.2 - http://xml.apache.org/xalan-j/) + * Xerces2-j (xerces:xercesImpl:2.12.2 - https://xerces.apache.org/xerces2-j/) * XML Commons External Components XML APIs (xml-apis:xml-apis:1.4.01 - http://xml.apache.org/commons/components/external/) BSD License: * AntLR Parser Generator (antlr:antlr:2.7.7 - http://www.antlr.org/) + * Adobe XMPCore (com.adobe.xmp:xmpcore:6.1.11 - https://www.adobe.com/devnet/xmp/library/eula-xmp-library-java.html) * coverity-escapers (com.coverity.security:coverity-escapers:1.1.1 - http://coverity.com/security) * Java Advanced Imaging Image I/O Tools API core (standalone) (com.github.jai-imageio:jai-imageio-core:1.4.0 - https://github.com/jai-imageio/jai-imageio-core) * JSONLD Java :: Core (com.github.jsonld-java:jsonld-java:0.5.1 - http://github.com/jsonld-java/jsonld-java/jsonld-java/) @@ -411,41 +408,36 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Protocol Buffers [Core] (com.google.protobuf:protobuf-java:3.11.0 - https://developers.google.com/protocol-buffers/protobuf-java/) * JZlib (com.jcraft:jzlib:1.1.3 - http://www.jcraft.com/jzlib/) * dnsjava (dnsjava:dnsjava:2.1.7 - 
http://www.dnsjava.org) - * Units of Measurement API (javax.measure:unit-api:1.0 - http://unitsofmeasurement.github.io/) * jaxen (jaxen:jaxen:1.1.6 - http://jaxen.codehaus.org/) - * JLine (jline:jline:0.9.94 - http://jline.sourceforge.net) * ANTLR 4 Runtime (org.antlr:antlr4-runtime:4.5.1-1 - http://www.antlr.org/antlr4-runtime) * commons-compiler (org.codehaus.janino:commons-compiler:3.0.9 - http://janino-compiler.github.io/commons-compiler/) * janino (org.codehaus.janino:janino:3.0.9 - http://janino-compiler.github.io/janino/) - * Stax2 API (org.codehaus.woodstox:stax2-api:3.1.4 - http://wiki.fasterxml.com/WoodstoxStax2) - * dom4j (org.dom4j:dom4j:2.1.1 - http://dom4j.github.io/) + * Stax2 API (org.codehaus.woodstox:stax2-api:4.2.1 - http://github.com/FasterXML/stax2-api) * Hamcrest Date (org.exparity:hamcrest-date:2.0.7 - https://github.com/exparity/hamcrest-date) - * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) - * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) - * Hamcrest (org.hamcrest:hamcrest:2.1 - http://hamcrest.org/JavaHamcrest/) + * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) + * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) + * Hamcrest (org.hamcrest:hamcrest:2.2 - http://hamcrest.org/JavaHamcrest/) * Hamcrest All (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all) * Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core) - * Hamcrest library (org.hamcrest:hamcrest-library:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-library) - * JBibTeX (org.jbibtex:jbibtex:1.0.10 - http://www.jbibtex.org) + * HdrHistogram 
(org.hdrhistogram:HdrHistogram:2.1.12 - http://hdrhistogram.github.io/HdrHistogram/) + * JBibTeX (org.jbibtex:jbibtex:1.0.20 - http://www.jbibtex.org) * asm (org.ow2.asm:asm:8.0.1 - http://asm.ow2.io/) * asm-analysis (org.ow2.asm:asm-analysis:7.1 - http://asm.ow2.org/) * asm-commons (org.ow2.asm:asm-commons:8.0.1 - http://asm.ow2.io/) * asm-tree (org.ow2.asm:asm-tree:7.1 - http://asm.ow2.org/) * asm-util (org.ow2.asm:asm-util:7.1 - http://asm.ow2.org/) - * PostgreSQL JDBC Driver (org.postgresql:postgresql:42.2.25 - https://jdbc.postgresql.org) + * PostgreSQL JDBC Driver (org.postgresql:postgresql:42.4.1 - https://jdbc.postgresql.org) * Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections) * JMatIO (org.tallison:jmatio:1.5 - https://github.com/tballison/jmatio) * XMLUnit for Java (xmlunit:xmlunit:1.3 - http://xmlunit.sourceforge.net/) Common Development and Distribution License (CDDL): - * JavaBeans Activation Framework (com.sun.activation:javax.activation:1.2.0 - http://java.net/all/javax.activation/) * istack common utility code runtime (com.sun.istack:istack-commons-runtime:3.0.7 - http://java.net/istack-commons/istack-commons-runtime/) * JavaMail API (com.sun.mail:javax.mail:1.6.2 - http://javaee.github.io/javamail/javax.mail) * JavaMail API (no providers) (com.sun.mail:mailapi:1.6.2 - http://javaee.github.io/javamail/mailapi) * Old JAXB Core (com.sun.xml.bind:jaxb-core:2.3.0.1 - http://jaxb.java.net/jaxb-bundles/jaxb-core) * Old JAXB Runtime (com.sun.xml.bind:jaxb-impl:2.3.1 - http://jaxb.java.net/jaxb-bundles/jaxb-impl) - * saaj-impl (com.sun.xml.messaging.saaj:saaj-impl:1.4.0-b03 - http://java.net/saaj-impl/) * Jakarta Annotations API (jakarta.annotation:jakarta.annotation-api:1.3.5 - https://projects.eclipse.org/projects/ee4j.ca) * jakarta.ws.rs-api (jakarta.ws.rs:jakarta.ws.rs-api:2.1.6 - https://github.com/eclipse-ee4j/jaxrs-api) * JavaBeans Activation Framework (JAF) (javax.activation:activation:1.1 - 
http://java.sun.com/products/javabeans/jaf/index.jsp) @@ -454,7 +446,6 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Java Servlet API (javax.servlet:javax.servlet-api:3.1.0 - http://servlet-spec.java.net) * javax.transaction API (javax.transaction:javax.transaction-api:1.3 - http://jta-spec.java.net) * jaxb-api (javax.xml.bind:jaxb-api:2.3.1 - https://github.com/javaee/jaxb-spec/jaxb-api) - * JAX-WS API (javax.xml.ws:jaxws-api:2.3.1 - https://github.com/javaee/jax-ws-spec) * JHighlight (org.codelibs:jhighlight:1.0.3 - https://github.com/codelibs/jhighlight) * HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api) * ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator) @@ -464,10 +455,9 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * javax.inject:1 as OSGi bundle (org.glassfish.hk2.external:jakarta.inject:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/jakarta.inject) * JAXB Runtime (org.glassfish.jaxb:jaxb-runtime:2.3.1 - http://jaxb.java.net/jaxb-runtime-parent/jaxb-runtime) * TXW2 Runtime (org.glassfish.jaxb:txw2:2.3.1 - http://jaxb.java.net/jaxb-txw-parent/txw2) - * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) - * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) + * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) + * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) * Java Transaction API (org.jboss.spec.javax.transaction:jboss-transaction-api_1.2_spec:1.1.1.Final - http://www.jboss.org/jboss-transaction-api_1.2_spec) - * 
MIME streaming extension (org.jvnet.mimepull:mimepull:1.9.7 - http://mimepull.java.net) * Extended StAX API (org.jvnet.staxex:stax-ex:1.8 - http://stax-ex.java.net/) Cordra (Version 2) License Agreement: @@ -478,56 +468,55 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines Eclipse Distribution License, Version 1.0: - * JavaBeans Activation Framework (com.sun.activation:jakarta.activation:1.2.1 - https://github.com/eclipse-ee4j/jaf/jakarta.activation) - * JavaBeans Activation Framework API jar (jakarta.activation:jakarta.activation-api:1.2.1 - https://github.com/eclipse-ee4j/jaf/jakarta.activation-api) * Jakarta Activation API jar (jakarta.activation:jakarta.activation-api:1.2.2 - https://github.com/eclipse-ee4j/jaf/jakarta.activation-api) - * jakarta.xml.bind-api (jakarta.xml.bind:jakarta.xml.bind-api:2.3.2 - https://github.com/eclipse-ee4j/jaxb-api/jakarta.xml.bind-api) * Jakarta XML Binding API (jakarta.xml.bind:jakarta.xml.bind-api:2.3.3 - https://github.com/eclipse-ee4j/jaxb-api/jakarta.xml.bind-api) * javax.persistence-api (javax.persistence:javax.persistence-api:2.2 - https://github.com/javaee/jpa-spec) - * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) - * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) - * Java Persistence API, Version 2.1 (org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.0.Final - http://hibernate.org) + * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) + * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) + * Java Persistence API, Version 2.1 (org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.2.Final - http://hibernate.org) Eclipse Public License: * System Rules 
(com.github.stefanbirkner:system-rules:1.19.0 - http://stefanbirkner.github.io/system-rules/) - * c3p0 (com.mchange:c3p0:0.9.5.5 - https://github.com/swaldman/c3p0) - * mchange-commons-java (com.mchange:mchange-commons-java:0.2.19 - https://github.com/swaldman/mchange-commons-java) + * H2 Database Engine (com.h2database:h2:2.1.210 - https://h2database.com) * Jakarta Annotations API (jakarta.annotation:jakarta.annotation-api:1.3.5 - https://projects.eclipse.org/projects/ee4j.ca) * jakarta.ws.rs-api (jakarta.ws.rs:jakarta.ws.rs-api:2.1.6 - https://github.com/eclipse-ee4j/jaxrs-api) * javax.persistence-api (javax.persistence:javax.persistence-api:2.2 - https://github.com/javaee/jpa-spec) * JUnit (junit:junit:4.13.1 - http://junit.org) - * AspectJ runtime (org.aspectj:aspectjrt:1.8.0 - http://www.aspectj.org) - * AspectJ weaver (org.aspectj:aspectjweaver:1.9.5 - http://www.aspectj.org) + * AspectJ Weaver (org.aspectj:aspectjweaver:1.9.7 - https://www.eclipse.org/aspectj/) * Eclipse Compiler for Java(TM) (org.eclipse.jdt:ecj:3.14.0 - http://www.eclipse.org/jdt) * Jetty :: Apache JSP Implementation (org.eclipse.jetty:apache-jsp:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/) - * Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client) - * Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client) - * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) - * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) + * Jetty :: ALPN :: Client 
(org.eclipse.jetty:jetty-alpn-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client) + * Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client) + * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) + * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) + * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) * Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty) - * Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-client) - * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-continuation) - * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-deploy) - * Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-http) - * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-io) - * Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-jmx) + * Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-client) + * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-continuation) + * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-continuation) + * Jetty :: Deployers 
(org.eclipse.jetty:jetty-deploy:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-deploy) + * Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-http) + * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-io) + * Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-jmx) * Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty) - * Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-rewrite) - * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-security) - * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-server) - * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-servlet) - * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-servlets) - * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-util) - * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-webapp) - * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-xml) - * Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-client) - * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-common) - * Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.34.v20201102 - 
https://eclipse.org/jetty/http2-parent/http2-hpack) - * Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport) - * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-server) + * Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-rewrite) + * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-security) + * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-security) + * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-server) + * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlet) + * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlets) + * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util) + * Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util-ajax) + * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-webapp) + * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-xml) + * Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-client) + * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-common) + * Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.44.v20210927 - 
https://eclipse.org/jetty/http2-parent/http2-hpack) + * Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport) + * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-server) * Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas) * HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api) * ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator) @@ -535,10 +524,10 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * OSGi resource locator (org.glassfish.hk2:osgi-resource-locator:1.0.3 - https://projects.eclipse.org/projects/ee4j/osgi-resource-locator) * aopalliance version 1.0 repackaged as a module (org.glassfish.hk2.external:aopalliance-repackaged:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/aopalliance-repackaged) * javax.inject:1 as OSGi bundle (org.glassfish.hk2.external:jakarta.inject:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/jakarta.inject) - * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) - * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) - * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) - * Java Persistence API, Version 2.1 (org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.0.Final - http://hibernate.org) + * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) + * 
jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) + * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) + * Java Persistence API, Version 2.1 (org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.2.Final - http://hibernate.org) * Jetty Server (org.mortbay.jetty:jetty:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/modules/jetty) * Jetty Servlet Tester (org.mortbay.jetty:jetty-servlet-tester:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-servlet-tester) * Jetty Utilities (org.mortbay.jetty:jetty-util:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-util) @@ -552,21 +541,16 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * json-schema-validator (com.github.java-json-tools:json-schema-validator:2.2.14 - https://github.com/java-json-tools/json-schema-validator) * msg-simple (com.github.java-json-tools:msg-simple:1.2 - https://github.com/java-json-tools/msg-simple) * uri-template (com.github.java-json-tools:uri-template:0.10 - https://github.com/java-json-tools/uri-template) - * SpotBugs Annotations (com.github.spotbugs:spotbugs-annotations:3.1.9 - https://spotbugs.github.io/) * FindBugs-Annotations (com.google.code.findbugs:annotations:3.0.1u2 - http://findbugs.sourceforge.net/) - * c3p0 (com.mchange:c3p0:0.9.5.5 - https://github.com/swaldman/c3p0) - * mchange-commons-java (com.mchange:mchange-commons-java:0.2.19 - https://github.com/swaldman/mchange-commons-java) - * Java Native Access (net.java.dev.jna:jna:5.5.0 - https://github.com/java-native-access/jna) * JHighlight (org.codelibs:jhighlight:1.0.3 - https://github.com/codelibs/jhighlight) - * Hibernate ORM - hibernate-core (org.hibernate:hibernate-core:5.4.10.Final - http://hibernate.org/orm) - * Hibernate ORM - hibernate-ehcache (org.hibernate:hibernate-ehcache:5.4.10.Final - 
http://hibernate.org/orm) - * Hibernate ORM - hibernate-jpamodelgen (org.hibernate:hibernate-jpamodelgen:5.4.10.Final - http://hibernate.org/orm) - * Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:5.1.0.Final - http://hibernate.org) + * Hibernate ORM - hibernate-core (org.hibernate:hibernate-core:5.6.5.Final - https://hibernate.org/orm) + * Hibernate ORM - hibernate-jcache (org.hibernate:hibernate-jcache:5.6.5.Final - https://hibernate.org/orm) + * Hibernate ORM - hibernate-jpamodelgen (org.hibernate:hibernate-jpamodelgen:5.6.5.Final - https://hibernate.org/orm) + * Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:5.1.2.Final - http://hibernate.org) * im4java (org.im4java:im4java:1.4.0 - http://sourceforge.net/projects/im4java/) - * JacORB OMG-API (org.jacorb:jacorb-omgapi:3.9 - http://www.jacorb.org) * Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/) - * Java RMI API (org.jboss.spec.javax.rmi:jboss-rmi-api_1.0_spec:1.0.6.Final - http://www.jboss.org/jboss-rmi-api_1.0_spec) * XOM (xom:xom:1.2.5 - http://xom.nu) + * XOM (xom:xom:1.3.7 - https://xom.nu) Go License: @@ -576,29 +560,21 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * Handle Server (net.handle:handle:9.3.0 - https://www.handle.net) - JDOM License (Apache-style license): - - * jdom (jdom:jdom:1.0 - no url defined) - MIT License: * Java SemVer (com.github.zafarkhaja:java-semver:0.9.0 - https://github.com/zafarkhaja/jsemver) + * dd-plist (com.googlecode.plist:dd-plist:1.23 - http://www.github.com/3breadt/dd-plist) * DigitalCollections: IIIF API Library (de.digitalcollections.iiif:iiif-apis:0.3.9 - https://github.com/dbmdz/iiif-apis) - * CDM core library (edu.ucar:cdm:4.5.5 - http://www.unidata.ucar.edu/software/netcdf-java/documentation.htm) - * GRIB IOSP and Feature Collection (edu.ucar:grib:4.5.5 - http://www.unidata.ucar.edu/software/netcdf-java/) - * HttpClient Wrappers 
(edu.ucar:httpservices:4.5.5 - http://www.unidata.ucar.edu/software/netcdf-java/documentation.htm) - * netCDF-4 IOSP JNI connection to C library (edu.ucar:netcdf4:4.5.5 - http://www.unidata.ucar.edu/software/netcdf-java/netcdf4/) - * udunits (edu.ucar:udunits:4.5.5 - http://www.unidata.ucar.edu/software/udunits//) * JOpt Simple (net.sf.jopt-simple:jopt-simple:5.0.4 - http://jopt-simple.github.io/jopt-simple) - * Bouncy Castle S/MIME API (org.bouncycastle:bcmail-jdk15on:1.65 - http://www.bouncycastle.org/java.html) - * Bouncy Castle PKIX, CMS, EAC, TSP, PKCS, OCSP, CMP, and CRMF APIs (org.bouncycastle:bcpkix-jdk15on:1.65 - http://www.bouncycastle.org/java.html) - * Bouncy Castle Provider (org.bouncycastle:bcprov-jdk15on:1.65 - http://www.bouncycastle.org/java.html) + * Bouncy Castle S/MIME API (org.bouncycastle:bcmail-jdk15on:1.70 - https://www.bouncycastle.org/java.html) + * Bouncy Castle PKIX, CMS, EAC, TSP, PKCS, OCSP, CMP, and CRMF APIs (org.bouncycastle:bcpkix-jdk15on:1.70 - https://www.bouncycastle.org/java.html) + * Bouncy Castle Provider (org.bouncycastle:bcprov-jdk15on:1.70 - https://www.bouncycastle.org/java.html) + * Bouncy Castle ASN.1 Extension and Utility APIs (org.bouncycastle:bcutil-jdk15on:1.70 - https://www.bouncycastle.org/java.html) * org.brotli:dec (org.brotli:dec:0.1.2 - http://brotli.org/dec) + * Checker Qual (org.checkerframework:checker-qual:3.10.0 - https://checkerframework.org) * Checker Qual (org.checkerframework:checker-qual:3.5.0 - https://checkerframework.org) - * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) - * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) - * Itadaki jbzip2 (org.itadaki:bzip2:0.9.1 - https://code.google.com/p/jbzip2/) - * jsoup Java HTML Parser (org.jsoup:jsoup:1.13.1 - https://jsoup.org/) + * jersey-core-client 
(org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) + * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) * mockito-core (org.mockito:mockito-core:3.12.4 - https://github.com/mockito/mockito) * mockito-inline (org.mockito:mockito-inline:3.12.4 - https://github.com/mockito/mockito) * ORCID - Model (org.orcid:orcid-model:3.0.2 - http://github.com/ORCID/orcid-model) @@ -606,29 +582,33 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines * JUL to SLF4J bridge (org.slf4j:jul-to-slf4j:1.7.25 - http://www.slf4j.org) * SLF4J API Module (org.slf4j:slf4j-api:1.7.25 - http://www.slf4j.org) * SLF4J Extensions Module (org.slf4j:slf4j-ext:1.7.28 - http://www.slf4j.org) + * HAL Browser (org.webjars:hal-browser:ad9b865 - http://webjars.org) * toastr (org.webjars.bowergithub.codeseven:toastr:2.1.4 - http://webjars.org) - * jquery (org.webjars.bowergithub.jquery:jquery-dist:3.5.1 - https://www.webjars.org) - * bootstrap (org.webjars.bowergithub.twbs:bootstrap:4.5.2 - https://www.webjars.org) + * backbone (org.webjars.bowergithub.jashkenas:backbone:1.4.1 - https://www.webjars.org) + * underscore (org.webjars.bowergithub.jashkenas:underscore:1.13.2 - https://www.webjars.org) + * jquery (org.webjars.bowergithub.jquery:jquery-dist:3.6.0 - https://www.webjars.org) + * urijs (org.webjars.bowergithub.medialize:uri.js:1.19.10 - https://www.webjars.org) + * bootstrap (org.webjars.bowergithub.twbs:bootstrap:4.6.1 - https://www.webjars.org) + * core-js (org.webjars.npm:core-js:3.25.2 - https://www.webjars.org) + * @json-editor/json-editor (org.webjars.npm:json-editor__json-editor:2.6.1 - https://www.webjars.org) Mozilla Public License: * juniversalchardet (com.googlecode.juniversalchardet:juniversalchardet:1.0.3 - http://juniversalchardet.googlecode.com/) - * h2 (com.h2database:h2:1.4.187 - no url defined) + * H2 Database Engine 
(com.h2database:h2:2.1.210 - https://h2database.com) * Saxon-HE (net.sf.saxon:Saxon-HE:9.8.0-14 - http://www.saxonica.com/) * Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/) * Mozilla Rhino (org.mozilla:rhino:1.7.7.2 - https://developer.mozilla.org/en/Rhino) - OGC copyright: - - * GeoAPI (org.opengis:geoapi:3.0.1 - http://www.geoapi.org/geoapi/) - Public Domain: - * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) - * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) - * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) + * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) + * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) + * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) + * HdrHistogram (org.hdrhistogram:HdrHistogram:2.1.12 - http://hdrhistogram.github.io/HdrHistogram/) + * LatencyUtils (org.latencyutils:LatencyUtils:2.0.3 - http://latencyutils.github.io/LatencyUtils/) * Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections) - * XZ for Java (org.tukaani:xz:1.8 - https://tukaani.org/xz/java.html) + * XZ for Java (org.tukaani:xz:1.9 - https://tukaani.org/xz/java.html) The JSON License: @@ -636,7 +616,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines UnRar License: - * Java UnRar (com.github.junrar:junrar:4.0.0 - https://github.com/junrar/junrar) + * Java Unrar (com.github.junrar:junrar:7.4.1 - https://github.com/junrar/junrar) Unicode/ICU License: @@ -644,10 +624,10 @@ 
https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines W3C license: - * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) - * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) + * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) + * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) jQuery license: - * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) - * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) + * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) + * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) diff --git a/README.md b/README.md index 864a099c1d..af9158eff3 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,7 @@ Documentation for each release may be viewed online or downloaded via our [Docum The latest DSpace Installation instructions are available at: https://wiki.lyrasis.org/display/DSDOC7x/Installing+DSpace -Please be aware that, as a Java web application, DSpace requires a database (PostgreSQL or Oracle) +Please be aware that, as a Java web application, DSpace requires a database (PostgreSQL) and a servlet container (usually Tomcat) in order to function. More information about these and all other prerequisites can be found in the Installation instructions above. 
@@ -48,18 +48,7 @@ See [Running DSpace 7 with Docker Compose](dspace/src/main/docker-compose/README ## Contributing -DSpace is a community built and supported project. We do not have a centralized development or support team, -but have a dedicated group of volunteers who help us improve the software, documentation, resources, etc. - -We welcome contributions of any type. Here's a few basic guides that provide suggestions for contributing to DSpace: -* [How to Contribute to DSpace](https://wiki.lyrasis.org/display/DSPACE/How+to+Contribute+to+DSpace): How to contribute in general (via code, documentation, bug reports, expertise, etc) -* [Code Contribution Guidelines](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines): How to give back code or contribute features, bug fixes, etc. -* [DSpace Community Advisory Team (DCAT)](https://wiki.lyrasis.org/display/cmtygp/DSpace+Community+Advisory+Team): If you are not a developer, we also have an interest group specifically for repository managers. The DCAT group meets virtually, once a month, and sends open invitations to join their meetings via the [DCAT mailing list](https://groups.google.com/d/forum/DSpaceCommunityAdvisoryTeam). - -We also encourage GitHub Pull Requests (PRs) at any time. Please see our [Development with Git](https://wiki.lyrasis.org/display/DSPACE/Development+with+Git) guide for more info. - -In addition, a listing of all known contributors to DSpace software can be -found online at: https://wiki.lyrasis.org/display/DSPACE/DSpaceContributors +See [Contributing documentation](CONTRIBUTING.md) ## Getting Help diff --git a/checkstyle.xml b/checkstyle.xml index 815edaec7b..e0fa808d83 100644 --- a/checkstyle.xml +++ b/checkstyle.xml @@ -92,9 +92,7 @@ For more information on CheckStyle configurations below, see: http://checkstyle. 
- - - + diff --git a/docker-compose.yml b/docker-compose.yml index f790257bdb..6008b873ae 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -41,6 +41,8 @@ services: target: 8080 - published: 8009 target: 8009 + - published: 8000 + target: 8000 stdin_open: true tty: true volumes: diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index f000f2e1ba..6850051600 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -12,7 +12,7 @@ org.dspace dspace-parent - 7.2 + 7.5-SNAPSHOT .. @@ -334,18 +334,47 @@ - + + org.apache.logging.log4j + log4j-api + org.hibernate - hibernate-ehcache + hibernate-core + + + + org.javassist + javassist + + + + + org.hibernate + hibernate-jcache + + + org.ehcache + ehcache + ${ehcache.version} + + + + org.springframework.boot + spring-boot-starter-cache + ${spring-boot.version} - - org.javassist - javassist + org.springframework.boot + spring-boot-starter-logging + + javax.cache + cache-api + org.hibernate hibernate-jpamodelgen @@ -358,7 +387,7 @@ org.hibernate.javax.persistence hibernate-jpa-2.1-api - 1.0.0.Final + 1.0.2.Final @@ -379,7 +408,7 @@ org.ow2.asm asm-commons - + org.bouncycastle bcpkix-jdk15on @@ -388,39 +417,6 @@ org.bouncycastle bcprov-jdk15on - - - org.eclipse.jetty - jetty-alpn-java-server - - - org.eclipse.jetty - jetty-deploy - - - org.eclipse.jetty - jetty-servlet - - - org.eclipse.jetty - jetty-servlets - - - org.eclipse.jetty - jetty-webapp - - - org.eclipse.jetty - jetty-xml - - - org.eclipse.jetty.http2 - http2-common - - - org.eclipse.jetty.http2 - http2-server - @@ -505,7 +501,7 @@ org.jdom - jdom + jdom2 org.apache.pdfbox @@ -515,22 +511,11 @@ org.apache.pdfbox fontbox - - org.apache.poi - poi-scratchpad - - - xalan - xalan - - - xerces - xercesImpl - com.ibm.icu icu4j + org.dspace oclc-harvester2 @@ -566,101 +551,49 @@ - org.rometools + com.rometools + rome + + + com.rometools rome-modules - 1.0 org.jbibtex jbibtex - 1.0.10 + 1.0.20 org.apache.httpcomponents httpclient + + org.apache.httpcomponents + 
httpcore + + + org.apache.httpcomponents + httpmime + + org.apache.solr solr-solrj ${solr.client.version} - - - - org.eclipse.jetty - jetty-http - - - org.eclipse.jetty - jetty-io - - - org.eclipse.jetty - jetty-util - - - + org.apache.solr solr-core test ${solr.client.version} - - - - org.apache.commons - commons-text - - - - org.eclipse.jetty - jetty-http - - - org.eclipse.jetty - jetty-io - - - org.eclipse.jetty - jetty-util - - - - - org.apache.solr - solr-cell - - - - org.apache.commons - commons-text - - - - org.eclipse.jetty - jetty-http - - - org.eclipse.jetty - jetty-io - - - org.eclipse.jetty - jetty-util - - org.apache.lucene lucene-core - - - org.apache.tika - tika-parsers - org.apache.lucene lucene-analyzers-icu @@ -676,9 +609,15 @@ lucene-analyzers-stempel test + + - org.apache.xmlbeans - xmlbeans + org.apache.tika + tika-core + + + org.apache.tika + tika-parsers-standard-package @@ -702,13 +641,6 @@ 1.1.1 - - - com.google.code.gson - gson - compile - - com.google.guava guava @@ -736,7 +668,7 @@ org.flywaydb flyway-core - 6.5.7 + 8.4.4 @@ -792,44 +724,6 @@ jaxb-runtime - - - org.apache.ws.commons.axiom - axiom-impl - ${axiom.version} - - - - org.apache.geronimo.specs - * - - - - org.codehaus.woodstox - woodstox-core-asl - - - - - org.apache.ws.commons.axiom - axiom-api - ${axiom.version} - - - - org.apache.geronimo.specs - * - - - - org.codehaus.woodstox - woodstox-core-asl - - - - org.glassfish.jersey.core @@ -848,7 +742,7 @@ com.amazonaws aws-java-sdk-s3 - 1.12.116 + 1.12.261 @@ -885,18 +779,25 @@ 20180130 + + + com.github.stefanbirkner + system-rules + 1.19.0 + test + + com.opencsv opencsv - 5.2 + 5.6 org.apache.velocity velocity-engine-core - jar @@ -918,18 +819,18 @@ 2.0.0 - - com.github.stefanbirkner - system-rules - 1.19.0 - test - - org.mock-server mockserver-junit-rule 5.11.2 test + + + + org.yaml + snakeyaml + + @@ -938,11 +839,6 @@ - - org.apache.commons - commons-text - 1.9 - io.netty netty-buffer @@ -971,7 +867,7 @@ org.apache.velocity 
velocity-engine-core - 2.2 + 2.3 org.xmlunit diff --git a/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java new file mode 100644 index 0000000000..1cacbf6aed --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java @@ -0,0 +1,30 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status; + +import java.sql.SQLException; +import java.util.Date; + +import org.dspace.content.Item; +import org.dspace.core.Context; + +/** + * Plugin interface for the access status calculation. + */ +public interface AccessStatusHelper { + /** + * Calculate the access status for the item. + * + * @param context the DSpace context + * @param item the item + * @return an access status value + * @throws SQLException An exception that provides information on a database access error or other errors. 
+ */ + public String getAccessStatusFromItem(Context context, Item item, Date threshold) + throws SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java new file mode 100644 index 0000000000..544dc99cb4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java @@ -0,0 +1,66 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status; + +import java.sql.SQLException; +import java.util.Date; + +import org.dspace.access.status.service.AccessStatusService; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.core.service.PluginService; +import org.dspace.services.ConfigurationService; +import org.joda.time.LocalDate; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation for the access status calculation service. + */ +public class AccessStatusServiceImpl implements AccessStatusService { + // Plugin implementation, set from the DSpace configuration by init(). + protected AccessStatusHelper helper = null; + + protected Date forever_date = null; + + @Autowired(required = true) + protected ConfigurationService configurationService; + + @Autowired(required = true) + protected PluginService pluginService; + + /** + * Initialize the bean (after dependency injection has already taken place). + * Ensures the configurationService is injected, so that we can get the plugin + * and the forever embargo date threshold from the configuration. + * Called by "init-method" in Spring configuration. 
+ * + * @throws Exception on generic exception + */ + public void init() throws Exception { + if (helper == null) { + helper = (AccessStatusHelper) pluginService.getSinglePlugin(AccessStatusHelper.class); + if (helper == null) { + throw new IllegalStateException("The AccessStatusHelper plugin was not defined in " + + "DSpace configuration."); + } + + // Defines the embargo forever date threshold for the access status. + // Look at EmbargoService.FOREVER for some improvements? + int year = configurationService.getIntProperty("access.status.embargo.forever.year"); + int month = configurationService.getIntProperty("access.status.embargo.forever.month"); + int day = configurationService.getIntProperty("access.status.embargo.forever.day"); + + forever_date = new LocalDate(year, month, day).toDate(); + } + } + + @Override + public String getAccessStatus(Context context, Item item) throws SQLException { + return helper.getAccessStatusFromItem(context, item, forever_date); + } +} diff --git a/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java b/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java new file mode 100644 index 0000000000..a67fa67af3 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java @@ -0,0 +1,159 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status; + +import java.sql.SQLException; +import java.util.Date; +import java.util.List; +import java.util.Objects; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.authorize.service.ResourcePolicyService; +import 
org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.eperson.Group; + +/** + * Default plugin implementation of the access status helper. + * The getAccessStatusFromItem method provides a simple logic to + * calculate the access status of an item based on the policies of + * the primary or the first bitstream in the original bundle. + * Users can override this method for enhanced functionality. + */ +public class DefaultAccessStatusHelper implements AccessStatusHelper { + public static final String EMBARGO = "embargo"; + public static final String METADATA_ONLY = "metadata.only"; + public static final String OPEN_ACCESS = "open.access"; + public static final String RESTRICTED = "restricted"; + public static final String UNKNOWN = "unknown"; + + protected ItemService itemService = + ContentServiceFactory.getInstance().getItemService(); + protected ResourcePolicyService resourcePolicyService = + AuthorizeServiceFactory.getInstance().getResourcePolicyService(); + protected AuthorizeService authorizeService = + AuthorizeServiceFactory.getInstance().getAuthorizeService(); + + public DefaultAccessStatusHelper() { + super(); + } + + /** + * Look at the item's policies to determine an access status value. + * It is also considering a date threshold for embargos and restrictions. + * + * If the item is null, simply returns the "unknown" value. 
+ * + * @param context the DSpace context + * @param item the item to embargo + * @param threshold the embargo threshold date + * @return an access status value + */ + @Override + public String getAccessStatusFromItem(Context context, Item item, Date threshold) + throws SQLException { + if (item == null) { + return UNKNOWN; + } + // Consider only the original bundles. + List bundles = item.getBundles(Constants.DEFAULT_BUNDLE_NAME); + // Check for primary bitstreams first. + Bitstream bitstream = bundles.stream() + .map(bundle -> bundle.getPrimaryBitstream()) + .filter(Objects::nonNull) + .findFirst() + .orElse(null); + if (bitstream == null) { + // If there is no primary bitstream, + // take the first bitstream in the bundles. + bitstream = bundles.stream() + .map(bundle -> bundle.getBitstreams()) + .flatMap(List::stream) + .findFirst() + .orElse(null); + } + return caculateAccessStatusForDso(context, bitstream, threshold); + } + + /** + * Look at the DSpace object's policies to determine an access status value. + * + * If the object is null, returns the "metadata.only" value. + * If any policy attached to the object is valid for the anonymous group, + * returns the "open.access" value. + * Otherwise, if the policy start date is before the embargo threshold date, + * returns the "embargo" value. + * Every other cases return the "restricted" value. + * + * @param context the DSpace context + * @param dso the DSpace object + * @param threshold the embargo threshold date + * @return an access status value + */ + private String caculateAccessStatusForDso(Context context, DSpaceObject dso, Date threshold) + throws SQLException { + if (dso == null) { + return METADATA_ONLY; + } + // Only consider read policies. + List policies = authorizeService + .getPoliciesActionFilter(context, dso, Constants.READ); + int openAccessCount = 0; + int embargoCount = 0; + int restrictedCount = 0; + int unknownCount = 0; + // Looks at all read policies. 
+ for (ResourcePolicy policy : policies) { + boolean isValid = resourcePolicyService.isDateValid(policy); + Group group = policy.getGroup(); + // The group must not be null here. However, + // if it is, consider this as an unexpected case. + if (group == null) { + unknownCount++; + } else if (StringUtils.equals(group.getName(), Group.ANONYMOUS)) { + // Only calculate the status for the anonymous group. + if (isValid) { + // If the policy is valid, the anonymous group have access + // to the bitstream. + openAccessCount++; + } else { + Date startDate = policy.getStartDate(); + if (startDate != null && !startDate.before(threshold)) { + // If the policy start date have a value and if this value + // is equal or superior to the configured forever date, the + // access status is also restricted. + restrictedCount++; + } else { + // If the current date is not between the policy start date + // and end date, the access status is embargo. + embargoCount++; + } + } + } + } + if (openAccessCount > 0) { + return OPEN_ACCESS; + } + if (embargoCount > 0 && restrictedCount == 0) { + return EMBARGO; + } + if (unknownCount > 0) { + return UNKNOWN; + } + return RESTRICTED; + } +} diff --git a/dspace-api/src/main/java/org/dspace/access/status/factory/AccessStatusServiceFactory.java b/dspace-api/src/main/java/org/dspace/access/status/factory/AccessStatusServiceFactory.java new file mode 100644 index 0000000000..77d8f6b448 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/factory/AccessStatusServiceFactory.java @@ -0,0 +1,25 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status.factory; + +import org.dspace.access.status.service.AccessStatusService; +import org.dspace.services.factory.DSpaceServicesFactory; + +/** + * Abstract factory to get services for the 
access status package, + * use AccessStatusServiceFactory.getInstance() to retrieve an implementation. + */ +public abstract class AccessStatusServiceFactory { + + public abstract AccessStatusService getAccessStatusService(); + + public static AccessStatusServiceFactory getInstance() { + return DSpaceServicesFactory.getInstance().getServiceManager() + .getServiceByName("accessStatusServiceFactory", AccessStatusServiceFactory.class); + } +} diff --git a/dspace-api/src/main/java/org/dspace/access/status/factory/AccessStatusServiceFactoryImpl.java b/dspace-api/src/main/java/org/dspace/access/status/factory/AccessStatusServiceFactoryImpl.java new file mode 100644 index 0000000000..fe3848cb2b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/factory/AccessStatusServiceFactoryImpl.java @@ -0,0 +1,26 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status.factory; + +import org.dspace.access.status.service.AccessStatusService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Factory implementation to get services for the access status package, + * use AccessStatusServiceFactory.getInstance() to retrieve an implementation. 
+ */ +public class AccessStatusServiceFactoryImpl extends AccessStatusServiceFactory { + + @Autowired(required = true) + private AccessStatusService accessStatusService; + + @Override + public AccessStatusService getAccessStatusService() { + return accessStatusService; + } +} diff --git a/dspace-api/src/main/java/org/dspace/access/status/package-info.java b/dspace-api/src/main/java/org/dspace/access/status/package-info.java new file mode 100644 index 0000000000..2c0ed22cd4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/package-info.java @@ -0,0 +1,30 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +/** + *

+ * Access status allows the users to view the bitstreams availability before + * browsing into the item itself. + *

+ *

+ * The access status is calculated through a pluggable class: + * {@link org.dspace.access.status.AccessStatusHelper}. + * The {@link org.dspace.access.status.AccessStatusServiceImpl} + * must be configured to specify this class, as well as a forever embargo date + * threshold year, month and day. + *

+ *

+ * See {@link org.dspace.access.status.DefaultAccessStatusHelper} for a simple calculation + * based on the primary or the first bitstream of the original bundle. You can + * supply your own class to implement more complex access statuses. + *

+ *

+ * For now, the access status is calculated when the item is shown in a list. + *

+ */ + +package org.dspace.access.status; diff --git a/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java b/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java new file mode 100644 index 0000000000..43de5e3c47 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java @@ -0,0 +1,46 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status.service; + +import java.sql.SQLException; + +import org.dspace.content.Item; +import org.dspace.core.Context; + +/** + * Public interface to the access status subsystem. + *

+ * Configuration properties: (with examples) + * {@code + * # values for the forever embargo date threshold + * # This threshold date is used in the default access status helper to determine if an item is + * # restricted or embargoed based on the start date of the primary (or first) file policies. + * # In this case, if the policy start date is earlier than the threshold date, the status will + * # be embargo, else it will be restricted. + * # You might want to change this threshold based on your needs. For example: some databases + * # don't accept a date later than 31 December 9999. + * access.status.embargo.forever.year = 10000 + * access.status.embargo.forever.month = 1 + * access.status.embargo.forever.day = 1 + * # implementation of access status helper plugin - replace with local implementation if applicable + * # This default access status helper provides an item status based on the policies of the primary + * # bitstream (or first bitstream in the original bundles if no primary file is specified). + * plugin.single.org.dspace.access.status.AccessStatusHelper = org.dspace.access.status.DefaultAccessStatusHelper + * } + */ +public interface AccessStatusService { + + /** + * Calculate the access status for an Item while considering the forever embargo date threshold. + * + * @param context the DSpace context + * @param item the item + * @throws SQLException An exception that provides information on a database access error or other errors.
+ */ + public String getAccessStatus(Context context, Item item) throws SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/administer/CreateAdministrator.java b/dspace-api/src/main/java/org/dspace/administer/CreateAdministrator.java index 80d69f3b66..81250e9c82 100644 --- a/dspace-api/src/main/java/org/dspace/administer/CreateAdministrator.java +++ b/dspace-api/src/main/java/org/dspace/administer/CreateAdministrator.java @@ -14,6 +14,7 @@ import java.util.Locale; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.DefaultParser; +import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Options; import org.apache.commons.lang3.StringUtils; import org.dspace.core.Context; @@ -54,14 +55,14 @@ public final class CreateAdministrator { protected GroupService groupService; /** - * For invoking via the command line. If called with no command line arguments, + * For invoking via the command line. 
If called with no command line arguments, * it will negotiate with the user for the administrator details * * @param argv the command line arguments given * @throws Exception if error */ public static void main(String[] argv) - throws Exception { + throws Exception { CommandLineParser parser = new DefaultParser(); Options options = new Options(); @@ -69,19 +70,41 @@ public final class CreateAdministrator { options.addOption("e", "email", true, "administrator email address"); options.addOption("f", "first", true, "administrator first name"); + options.addOption("h", "help", false, "explain create-administrator options"); options.addOption("l", "last", true, "administrator last name"); options.addOption("c", "language", true, "administrator language"); options.addOption("p", "password", true, "administrator password"); - CommandLine line = parser.parse(options, argv); + CommandLine line = null; + + try { + + line = parser.parse(options, argv); + + } catch (Exception e) { + + System.out.println(e.getMessage() + "\nTry \"dspace create-administrator -h\" to print help information."); + System.exit(1); + + } if (line.hasOption("e") && line.hasOption("f") && line.hasOption("l") && - line.hasOption("c") && line.hasOption("p")) { + line.hasOption("c") && line.hasOption("p")) { ca.createAdministrator(line.getOptionValue("e"), - line.getOptionValue("f"), line.getOptionValue("l"), - line.getOptionValue("c"), line.getOptionValue("p")); + line.getOptionValue("f"), line.getOptionValue("l"), + line.getOptionValue("c"), line.getOptionValue("p")); + } else if (line.hasOption("h")) { + String header = "\nA command-line tool for creating an initial administrator for setting up a" + + " DSpace site. Unless all the required parameters are passed it will" + + " prompt for an e-mail address, last name, first name and password from" + + " standard input.. 
An administrator group is then created and the data passed" + + " in used to create an e-person in that group.\n\n"; + String footer = "\n"; + HelpFormatter formatter = new HelpFormatter(); + formatter.printHelp("dspace create-administrator", header, options, footer, true); + return; } else { - ca.negotiateAdministratorDetails(); + ca.negotiateAdministratorDetails(line); } } @@ -91,7 +114,7 @@ public final class CreateAdministrator { * @throws Exception if error */ protected CreateAdministrator() - throws Exception { + throws Exception { context = new Context(); groupService = EPersonServiceFactory.getInstance().getGroupService(); ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); @@ -103,20 +126,20 @@ public final class CreateAdministrator { * * @throws Exception if error */ - protected void negotiateAdministratorDetails() - throws Exception { + protected void negotiateAdministratorDetails(CommandLine line) + throws Exception { Console console = System.console(); System.out.println("Creating an initial administrator account"); - boolean dataOK = false; - - String email = null; - String firstName = null; - String lastName = null; - char[] password1 = null; - char[] password2 = null; + String email = line.getOptionValue('e'); + String firstName = line.getOptionValue('f'); + String lastName = line.getOptionValue('l'); String language = I18nUtil.getDefaultLocale().getLanguage(); + ConfigurationService cfg = DSpaceServicesFactory.getInstance().getConfigurationService(); + boolean flag = line.hasOption('p'); + char[] password = null; + boolean dataOK = line.hasOption('f') && line.hasOption('e') && line.hasOption('l'); while (!dataOK) { System.out.print("E-mail address: "); @@ -147,8 +170,6 @@ public final class CreateAdministrator { if (lastName != null) { lastName = lastName.trim(); } - - ConfigurationService cfg = DSpaceServicesFactory.getInstance().getConfigurationService(); if (cfg.hasProperty("webui.supported.locales")) { 
System.out.println("Select one of the following languages: " + cfg.getProperty("webui.supported.locales")); @@ -163,46 +184,59 @@ public final class CreateAdministrator { } } - System.out.println("Password will not display on screen."); - System.out.print("Password: "); + System.out.print("Is the above data correct? (y or n): "); System.out.flush(); - password1 = console.readPassword(); + String s = console.readLine(); - System.out.print("Again to confirm: "); - System.out.flush(); - - password2 = console.readPassword(); - - //TODO real password validation - if (password1.length > 1 && Arrays.equals(password1, password2)) { - // password OK - System.out.print("Is the above data correct? (y or n): "); - System.out.flush(); - - String s = console.readLine(); - - if (s != null) { - s = s.trim(); - if (s.toLowerCase().startsWith("y")) { - dataOK = true; - } + if (s != null) { + s = s.trim(); + if (s.toLowerCase().startsWith("y")) { + dataOK = true; } - } else { - System.out.println("Passwords don't match"); } + + } + if (!flag) { + password = getPassword(console); + if (password == null) { + return; + } + } else { + password = line.getOptionValue("p").toCharArray(); } - // if we make it to here, we are ready to create an administrator - createAdministrator(email, firstName, lastName, language, String.valueOf(password1)); + createAdministrator(email, firstName, lastName, language, String.valueOf(password)); - //Cleaning arrays that held password - Arrays.fill(password1, ' '); - Arrays.fill(password2, ' '); + } + + private char[] getPassword(Console console) { + char[] password1 = null; + char[] password2 = null; + System.out.println("Password will not display on screen."); + System.out.print("Password: "); + System.out.flush(); + + password1 = console.readPassword(); + + System.out.print("Again to confirm: "); + System.out.flush(); + + password2 = console.readPassword(); + + // TODO real password validation + if (password1.length > 1 && Arrays.equals(password1, 
password2)) { + // password OK + Arrays.fill(password2, ' '); + return password1; + } else { + System.out.println("Passwords don't match"); + return null; + } } /** - * Create the administrator with the given details. If the user + * Create the administrator with the given details. If the user * already exists then they are simply upped to administrator status * * @param email the email for the user @@ -213,8 +247,8 @@ public final class CreateAdministrator { * @throws Exception if error */ protected void createAdministrator(String email, String first, String last, - String language, String pw) - throws Exception { + String language, String pw) + throws Exception { // Of course we aren't an administrator yet so we need to // circumvent authorisation context.turnOffAuthorisationSystem(); diff --git a/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java b/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java index 42461d7210..2677cb2050 100644 --- a/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java +++ b/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java @@ -11,13 +11,16 @@ import java.io.IOException; import java.sql.SQLException; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.xpath.XPathAPI; import org.dspace.authorize.AuthorizeException; import org.dspace.content.MetadataField; import org.dspace.content.MetadataSchema; @@ -90,7 +93,7 @@ public class MetadataImporter { public static void main(String[] args) throws ParseException, SQLException, 
IOException, TransformerException, ParserConfigurationException, AuthorizeException, SAXException, - NonUniqueMetadataException, RegistryImportException { + NonUniqueMetadataException, RegistryImportException, XPathExpressionException { // create an options object and populate it CommandLineParser parser = new DefaultParser(); @@ -124,8 +127,8 @@ public class MetadataImporter { * @throws RegistryImportException if import fails */ public static void loadRegistry(String file, boolean forceUpdate) - throws SQLException, IOException, TransformerException, ParserConfigurationException, - AuthorizeException, SAXException, NonUniqueMetadataException, RegistryImportException { + throws SQLException, IOException, TransformerException, ParserConfigurationException, AuthorizeException, + SAXException, NonUniqueMetadataException, RegistryImportException, XPathExpressionException { Context context = null; try { @@ -137,7 +140,9 @@ public class MetadataImporter { Document document = RegistryImporter.loadXML(file); // Get the nodes corresponding to types - NodeList schemaNodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-schema"); + XPath xPath = XPathFactory.newInstance().newXPath(); + NodeList schemaNodes = (NodeList) xPath.compile("/dspace-dc-types/dc-schema") + .evaluate(document, XPathConstants.NODESET); // Add each one as a new format to the registry for (int i = 0; i < schemaNodes.getLength(); i++) { @@ -146,7 +151,8 @@ public class MetadataImporter { } // Get the nodes corresponding to types - NodeList typeNodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-type"); + NodeList typeNodes = (NodeList) xPath.compile("/dspace-dc-types/dc-type") + .evaluate(document, XPathConstants.NODESET); // Add each one as a new format to the registry for (int i = 0; i < typeNodes.getLength(); i++) { @@ -178,8 +184,8 @@ public class MetadataImporter { * @throws RegistryImportException if import fails */ private static void loadSchema(Context context, Node node, 
boolean updateExisting) - throws SQLException, IOException, TransformerException, - AuthorizeException, NonUniqueMetadataException, RegistryImportException { + throws SQLException, AuthorizeException, NonUniqueMetadataException, RegistryImportException, + XPathExpressionException { // Get the values String name = RegistryImporter.getElementData(node, "name"); String namespace = RegistryImporter.getElementData(node, "namespace"); @@ -236,8 +242,8 @@ public class MetadataImporter { * @throws RegistryImportException if import fails */ private static void loadType(Context context, Node node) - throws SQLException, IOException, TransformerException, - AuthorizeException, NonUniqueMetadataException, RegistryImportException { + throws SQLException, IOException, AuthorizeException, NonUniqueMetadataException, RegistryImportException, + XPathExpressionException { // Get the values String schema = RegistryImporter.getElementData(node, "schema"); String element = RegistryImporter.getElementData(node, "element"); diff --git a/dspace-api/src/main/java/org/dspace/administer/ProcessCleaner.java b/dspace-api/src/main/java/org/dspace/administer/ProcessCleaner.java new file mode 100644 index 0000000000..ee6b8d08b0 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/administer/ProcessCleaner.java @@ -0,0 +1,140 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.administer; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + +import org.apache.commons.cli.ParseException; +import org.apache.commons.lang.time.DateUtils; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.ProcessStatus; +import org.dspace.core.Context; +import org.dspace.scripts.DSpaceRunnable; +import 
org.dspace.scripts.Process; +import org.dspace.scripts.factory.ScriptServiceFactory; +import org.dspace.scripts.service.ProcessService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.utils.DSpace; + +/** + * Script to cleanup the old processes in the specified state. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ProcessCleaner extends DSpaceRunnable> { + + private ConfigurationService configurationService; + + private ProcessService processService; + + + private boolean cleanCompleted = false; + + private boolean cleanFailed = false; + + private boolean cleanRunning = false; + + private boolean help = false; + + private Integer days; + + + @Override + public void setup() throws ParseException { + + this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + this.processService = ScriptServiceFactory.getInstance().getProcessService(); + + this.help = commandLine.hasOption('h'); + this.cleanFailed = commandLine.hasOption('f'); + this.cleanRunning = commandLine.hasOption('r'); + this.cleanCompleted = commandLine.hasOption('c') || (!cleanFailed && !cleanRunning); + + this.days = configurationService.getIntProperty("process-cleaner.days", 14); + + if (this.days <= 0) { + throw new IllegalStateException("The number of days must be a positive integer."); + } + + } + + @Override + public void internalRun() throws Exception { + + if (help) { + printHelp(); + return; + } + + Context context = new Context(); + + try { + context.turnOffAuthorisationSystem(); + performDeletion(context); + } finally { + context.restoreAuthSystemState(); + context.complete(); + } + + } + + /** + * Delete the processes based on the specified statuses and the configured days + * from their creation. 
+ */ + private void performDeletion(Context context) throws SQLException, IOException, AuthorizeException { + + List statuses = getProcessToDeleteStatuses(); + Date creationDate = calculateCreationDate(); + + handler.logInfo("Searching for processes with status: " + statuses); + List processes = processService.findByStatusAndCreationTimeOlderThan(context, statuses, creationDate); + handler.logInfo("Found " + processes.size() + " processes to be deleted"); + for (Process process : processes) { + processService.delete(context, process); + } + + handler.logInfo("Process cleanup completed"); + + } + + /** + * Returns the list of Process statuses to be deleted. + */ + private List getProcessToDeleteStatuses() { + List statuses = new ArrayList(); + if (cleanCompleted) { + statuses.add(ProcessStatus.COMPLETED); + } + if (cleanFailed) { + statuses.add(ProcessStatus.FAILED); + } + if (cleanRunning) { + statuses.add(ProcessStatus.RUNNING); + } + return statuses; + } + + private Date calculateCreationDate() { + return DateUtils.addDays(new Date(), -days); + } + + @Override + @SuppressWarnings("unchecked") + public ProcessCleanerConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager() + .getServiceByName("process-cleaner", ProcessCleanerConfiguration.class); + } + +} diff --git a/dspace-services/src/main/java/org/dspace/services/caching/model/package-info.java b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerCli.java similarity index 53% rename from dspace-services/src/main/java/org/dspace/services/caching/model/package-info.java rename to dspace-api/src/main/java/org/dspace/administer/ProcessCleanerCli.java index 67e1ee1ffc..292c6c372e 100644 --- a/dspace-services/src/main/java/org/dspace/services/caching/model/package-info.java +++ b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerCli.java @@ -5,9 +5,14 @@ * * http://www.dspace.org/license/ */ +package org.dspace.administer; /** - * Implementations of the Cache type, for
various purposes. + * The {@link ProcessCleaner} for CLI. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * */ +public class ProcessCleanerCli extends ProcessCleaner { -package org.dspace.services.caching.model; +} diff --git a/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerCliConfiguration.java b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerCliConfiguration.java new file mode 100644 index 0000000000..043990156d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerCliConfiguration.java @@ -0,0 +1,18 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.administer; + +/** + * The {@link ProcessCleanerConfiguration} for CLI. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ProcessCleanerCliConfiguration extends ProcessCleanerConfiguration { + +} diff --git a/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java new file mode 100644 index 0000000000..8d189038d9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java @@ -0,0 +1,70 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.administer; + +import java.sql.SQLException; + +import org.apache.commons.cli.Options; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.core.Context; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * The {@link ScriptConfiguration} 
for the {@link ProcessCleaner} script. + */ +public class ProcessCleanerConfiguration extends ScriptConfiguration { + + @Autowired + private AuthorizeService authorizeService; + + private Class dspaceRunnableClass; + + @Override + public boolean isAllowedToExecute(Context context) { + try { + return authorizeService.isAdmin(context); + } catch (SQLException e) { + throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + } + } + + @Override + public Options getOptions() { + if (options == null) { + + Options options = new Options(); + + options.addOption("h", "help", false, "help"); + + options.addOption("r", "running", false, "delete the process with RUNNING status"); + options.getOption("r").setType(boolean.class); + + options.addOption("f", "failed", false, "delete the process with FAILED status"); + options.getOption("f").setType(boolean.class); + + options.addOption("c", "completed", false, + "delete the process with COMPLETED status (default if no statuses are specified)"); + options.getOption("c").setType(boolean.class); + + super.options = options; + } + return options; + } + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/administer/RegistryImporter.java b/dspace-api/src/main/java/org/dspace/administer/RegistryImporter.java index 5b5f70412a..27a6534213 100644 --- a/dspace-api/src/main/java/org/dspace/administer/RegistryImporter.java +++ b/dspace-api/src/main/java/org/dspace/administer/RegistryImporter.java @@ -13,8 +13,11 @@ import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; 
+import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; -import org.apache.xpath.XPathAPI; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; @@ -72,9 +75,10 @@ public class RegistryImporter { * @throws TransformerException if error */ public static String getElementData(Node parentElement, String childName) - throws TransformerException { + throws XPathExpressionException { // Grab the child node - Node childNode = XPathAPI.selectSingleNode(parentElement, childName); + XPath xPath = XPathFactory.newInstance().newXPath(); + Node childNode = (Node) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODE); if (childNode == null) { // No child node, so no values @@ -115,9 +119,10 @@ public class RegistryImporter { * @throws TransformerException if error */ public static String[] getRepeatedElementData(Node parentElement, - String childName) throws TransformerException { + String childName) throws XPathExpressionException { // Grab the child node - NodeList childNodes = XPathAPI.selectNodeList(parentElement, childName); + XPath xPath = XPathFactory.newInstance().newXPath(); + NodeList childNodes = (NodeList) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODESET); String[] data = new String[childNodes.getLength()]; diff --git a/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java b/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java index 2b6a01b558..bbf320a0d5 100644 --- a/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java +++ b/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java @@ -16,9 +16,12 @@ import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpressionException; +import 
javax.xml.xpath.XPathFactory; import org.apache.logging.log4j.Logger; -import org.apache.xpath.XPathAPI; import org.dspace.authorize.AuthorizeException; import org.dspace.content.BitstreamFormat; import org.dspace.content.factory.ContentServiceFactory; @@ -122,12 +125,13 @@ public class RegistryLoader { */ public static void loadBitstreamFormats(Context context, String filename) throws SQLException, IOException, ParserConfigurationException, - SAXException, TransformerException, AuthorizeException { + SAXException, TransformerException, AuthorizeException, XPathExpressionException { Document document = loadXML(filename); // Get the nodes corresponding to formats - NodeList typeNodes = XPathAPI.selectNodeList(document, - "dspace-bitstream-types/bitstream-type"); + XPath xPath = XPathFactory.newInstance().newXPath(); + NodeList typeNodes = (NodeList) xPath.compile("dspace-bitstream-types/bitstream-type") + .evaluate(document, XPathConstants.NODESET); // Add each one as a new format to the registry for (int i = 0; i < typeNodes.getLength(); i++) { @@ -151,8 +155,7 @@ public class RegistryLoader { * @throws AuthorizeException if authorization error */ private static void loadFormat(Context context, Node node) - throws SQLException, IOException, TransformerException, - AuthorizeException { + throws SQLException, AuthorizeException, XPathExpressionException { // Get the values String mimeType = getElementData(node, "mimetype"); String shortDesc = getElementData(node, "short_description"); @@ -231,9 +234,10 @@ public class RegistryLoader { * @throws TransformerException if transformer error */ private static String getElementData(Node parentElement, String childName) - throws TransformerException { + throws XPathExpressionException { // Grab the child node - Node childNode = XPathAPI.selectSingleNode(parentElement, childName); + XPath xPath = XPathFactory.newInstance().newXPath(); + Node childNode = (Node) xPath.compile(childName).evaluate(parentElement, 
XPathConstants.NODE); if (childNode == null) { // No child node, so no values @@ -274,9 +278,10 @@ public class RegistryLoader { * @throws TransformerException if transformer error */ private static String[] getRepeatedElementData(Node parentElement, - String childName) throws TransformerException { + String childName) throws XPathExpressionException { // Grab the child node - NodeList childNodes = XPathAPI.selectNodeList(parentElement, childName); + XPath xPath = XPathFactory.newInstance().newXPath(); + NodeList childNodes = (NodeList) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODESET); String[] data = new String[childNodes.getLength()]; diff --git a/dspace-api/src/main/java/org/dspace/administer/StructBuilder.java b/dspace-api/src/main/java/org/dspace/administer/StructBuilder.java index 89d9ffe5a8..13a1b3b5bb 100644 --- a/dspace-api/src/main/java/org/dspace/administer/StructBuilder.java +++ b/dspace-api/src/main/java/org/dspace/administer/StructBuilder.java @@ -30,6 +30,10 @@ import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; @@ -38,7 +42,7 @@ import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.xpath.XPathAPI; +import org.apache.commons.lang3.StringUtils; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Collection; import org.dspace.content.Community; @@ -52,9 +56,11 @@ import org.dspace.content.service.CommunityService; import org.dspace.core.Context; import 
org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.EPersonService; -import org.jdom.Element; -import org.jdom.output.Format; -import org.jdom.output.XMLOutputter; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.handle.service.HandleService; +import org.jdom2.Element; +import org.jdom2.output.Format; +import org.jdom2.output.XMLOutputter; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; @@ -76,6 +82,7 @@ import org.xml.sax.SAXException; * * * } + * *

* It can be arbitrarily deep, and supports all the metadata elements * that make up the community and collection metadata. See the system @@ -104,12 +111,14 @@ public class StructBuilder { */ private static final Map communityMap = new HashMap<>(); - protected static CommunityService communityService + protected static final CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); - protected static CollectionService collectionService + protected static final CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); - protected static EPersonService ePersonService + protected static final EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); + protected static final HandleService handleService + = HandleServiceFactory.getInstance().getHandleService(); /** * Default constructor @@ -135,16 +144,18 @@ public class StructBuilder { * @throws SQLException passed through. * @throws FileNotFoundException if input or output could not be opened. * @throws TransformerException if the input document is invalid. + * @throws XPathExpressionException passed through. */ public static void main(String[] argv) - throws ParserConfigurationException, SQLException, - FileNotFoundException, IOException, TransformerException { + throws ParserConfigurationException, SQLException, + IOException, TransformerException, XPathExpressionException { // Define command line options. Options options = new Options(); options.addOption("h", "help", false, "Print this help message."); options.addOption("?", "help"); options.addOption("x", "export", false, "Export the current structure as XML."); + options.addOption("k", "keep-handles", false, "Apply Handles from input document."); options.addOption(Option.builder("e").longOpt("eperson") .desc("User who is manipulating the repository's structure.") @@ -206,6 +217,7 @@ public class StructBuilder { // Export? Import? 
if (line.hasOption('x')) { // export exportStructure(context, outputStream); + outputStream.close(); } else { // Must be import String input = line.getOptionValue('f'); if (null == input) { @@ -220,7 +232,12 @@ inputStream = new FileInputStream(input); } - importStructure(context, inputStream, outputStream); + boolean keepHandles = line.hasOption("k"); + importStructure(context, inputStream, outputStream, keepHandles); + + inputStream.close(); + outputStream.close(); + // save changes from import context.complete(); } @@ -233,14 +250,17 @@ * @param context * @param input XML which describes the new communities and collections. * @param output input, annotated with the new objects' identifiers. + * @param keepHandles true if Handles should be set from input. * @throws IOException * @throws ParserConfigurationException * @throws SAXException * @throws TransformerException * @throws SQLException */ - static void importStructure(Context context, InputStream input, OutputStream output) - throws IOException, ParserConfigurationException, SQLException, TransformerException { + static void importStructure(Context context, InputStream input, + OutputStream output, boolean keepHandles) + throws IOException, ParserConfigurationException, SQLException, + TransformerException, XPathExpressionException { // load the XML Document document = null; @@ -258,15 +278,29 @@ // is properly structured. try { validate(document); - } catch (TransformerException ex) { + } catch (XPathExpressionException ex) { System.err.format("The input document is invalid: %s%n", ex.getMessage()); System.exit(1); } // Check for 'identifier' attributes -- possibly output by this class.
- NodeList identifierNodes = XPathAPI.selectNodeList(document, "//*[@identifier]"); + XPath xPath = XPathFactory.newInstance().newXPath(); + NodeList identifierNodes = (NodeList) xPath.compile("//*[@identifier]") + .evaluate(document, XPathConstants.NODESET); if (identifierNodes.getLength() > 0) { - System.err.println("The input document has 'identifier' attributes, which will be ignored."); + if (!keepHandles) { + System.err.println("The input document has 'identifier' attributes, which will be ignored."); + } else { + for (int i = 0; i < identifierNodes.getLength() ; i++) { + String identifier = identifierNodes.item(i).getAttributes().item(0).getTextContent(); + if (handleService.resolveToURL(context, identifier) != null) { + System.err.printf("The input document contains handle %s," + + " which is in use already. Aborting...%n", + identifier); + System.exit(1); + } + } + } } // load the mappings into the member variable hashmaps @@ -287,10 +321,11 @@ public class StructBuilder { Element[] elements = new Element[]{}; try { // get the top level community list - NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community"); + NodeList first = (NodeList) xPath.compile("/import_structure/community") + .evaluate(document, XPathConstants.NODESET); // run the import starting with the top level communities - elements = handleCommunities(context, first, null); + elements = handleCommunities(context, first, null, keepHandles); } catch (TransformerException ex) { System.err.format("Input content not understood: %s%n", ex.getMessage()); System.exit(1); @@ -307,7 +342,7 @@ public class StructBuilder { } // finally write the string into the output file. - final org.jdom.Document xmlOutput = new org.jdom.Document(root); + final org.jdom2.Document xmlOutput = new org.jdom2.Document(root); try { new XMLOutputter().output(xmlOutput, output); } catch (IOException e) { @@ -411,7 +446,7 @@ public class StructBuilder { } // Now write the structure out. 
- org.jdom.Document xmlOutput = new org.jdom.Document(rootElement); + org.jdom2.Document xmlOutput = new org.jdom2.Document(rootElement); try { XMLOutputter outputter = new XMLOutputter(Format.getPrettyFormat()); outputter.output(xmlOutput, output); @@ -456,14 +491,16 @@ public class StructBuilder { * @throws TransformerException if transformer error */ private static void validate(org.w3c.dom.Document document) - throws TransformerException { + throws XPathExpressionException { StringBuilder err = new StringBuilder(); boolean trip = false; err.append("The following errors were encountered parsing the source XML.\n"); err.append("No changes have been made to the DSpace instance.\n\n"); - NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community"); + XPath xPath = XPathFactory.newInstance().newXPath(); + NodeList first = (NodeList) xPath.compile("/import_structure/community") + .evaluate(document, XPathConstants.NODESET); if (first.getLength() == 0) { err.append("-There are no top level communities in the source document."); System.out.println(err.toString()); @@ -493,14 +530,15 @@ public class StructBuilder { * no errors. 
*/ private static String validateCommunities(NodeList communities, int level) - throws TransformerException { + throws XPathExpressionException { StringBuilder err = new StringBuilder(); boolean trip = false; String errs = null; + XPath xPath = XPathFactory.newInstance().newXPath(); for (int i = 0; i < communities.getLength(); i++) { Node n = communities.item(i); - NodeList name = XPathAPI.selectNodeList(n, "name"); + NodeList name = (NodeList) xPath.compile("name").evaluate(n, XPathConstants.NODESET); if (name.getLength() != 1) { String pos = Integer.toString(i + 1); err.append("-The level ").append(level) @@ -510,7 +548,7 @@ public class StructBuilder { } // validate sub communities - NodeList subCommunities = XPathAPI.selectNodeList(n, "community"); + NodeList subCommunities = (NodeList) xPath.compile("community").evaluate(n, XPathConstants.NODESET); String comErrs = validateCommunities(subCommunities, level + 1); if (comErrs != null) { err.append(comErrs); @@ -518,7 +556,7 @@ public class StructBuilder { } // validate collections - NodeList collections = XPathAPI.selectNodeList(n, "collection"); + NodeList collections = (NodeList) xPath.compile("collection").evaluate(n, XPathConstants.NODESET); String colErrs = validateCollections(collections, level + 1); if (colErrs != null) { err.append(colErrs); @@ -542,14 +580,15 @@ public class StructBuilder { * @return the errors to be generated by the calling method, or null if none */ private static String validateCollections(NodeList collections, int level) - throws TransformerException { + throws XPathExpressionException { StringBuilder err = new StringBuilder(); boolean trip = false; String errs = null; + XPath xPath = XPathFactory.newInstance().newXPath(); for (int i = 0; i < collections.getLength(); i++) { Node n = collections.item(i); - NodeList name = XPathAPI.selectNodeList(n, "name"); + NodeList name = (NodeList) xPath.compile("name").evaluate(n, XPathConstants.NODESET); if (name.getLength() != 1) { String pos 
= Integer.toString(i + 1); err.append("-The level ").append(level) @@ -609,22 +648,29 @@ public class StructBuilder { * @param context the context of the request * @param communities a nodelist of communities to create along with their sub-structures * @param parent the parent community of the nodelist of communities to create + * @param keepHandles use Handles from input. * @return an element array containing additional information regarding the * created communities (e.g. the handles they have been assigned) */ - private static Element[] handleCommunities(Context context, NodeList communities, Community parent) - throws TransformerException, SQLException, AuthorizeException { + private static Element[] handleCommunities(Context context, NodeList communities, + Community parent, boolean keepHandles) + throws TransformerException, SQLException, AuthorizeException, + XPathExpressionException { Element[] elements = new Element[communities.getLength()]; + XPath xPath = XPathFactory.newInstance().newXPath(); for (int i = 0; i < communities.getLength(); i++) { - Community community; - Element element = new Element("community"); + Node tn = communities.item(i); + Node identifier = tn.getAttributes().getNamedItem("identifier"); // create the community or sub community - if (parent != null) { + Community community; + if (null == identifier + || StringUtils.isBlank(identifier.getNodeValue()) + || !keepHandles) { community = communityService.create(parent, context); } else { - community = communityService.create(null, context); + community = communityService.create(parent, context, identifier.getNodeValue()); } // default the short description to be an empty string @@ -632,9 +678,8 @@ public class StructBuilder { MD_SHORT_DESCRIPTION, null, " "); // now update the metadata - Node tn = communities.item(i); for (Map.Entry entry : communityMap.entrySet()) { - NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey()); + NodeList nl = (NodeList) 
xPath.compile(entry.getKey()).evaluate(tn, XPathConstants.NODESET); if (nl.getLength() == 1) { communityService.setMetadataSingleValue(context, community, entry.getValue(), null, getStringValue(nl.item(0))); @@ -658,6 +703,7 @@ public class StructBuilder { // but it's here to keep it separate from the create process in // case // we want to move it or make it switchable later + Element element = new Element("community"); element.setAttribute("identifier", community.getHandle()); Element nameElement = new Element("name"); @@ -700,12 +746,16 @@ public class StructBuilder { } // handle sub communities - NodeList subCommunities = XPathAPI.selectNodeList(tn, "community"); - Element[] subCommunityElements = handleCommunities(context, subCommunities, community); + NodeList subCommunities = (NodeList) xPath.compile("community") + .evaluate(tn, XPathConstants.NODESET); + Element[] subCommunityElements = handleCommunities(context, + subCommunities, community, keepHandles); // handle collections - NodeList collections = XPathAPI.selectNodeList(tn, "collection"); - Element[] collectionElements = handleCollections(context, collections, community); + NodeList collections = (NodeList) xPath.compile("collection") + .evaluate(tn, XPathConstants.NODESET); + Element[] collectionElements = handleCollections(context, + collections, community, keepHandles); int j; for (j = 0; j < subCommunityElements.length; j++) { @@ -730,22 +780,33 @@ public class StructBuilder { * @return an Element array containing additional information about the * created collections (e.g. 
the handle) */ - private static Element[] handleCollections(Context context, NodeList collections, Community parent) - throws TransformerException, SQLException, AuthorizeException { + private static Element[] handleCollections(Context context, + NodeList collections, Community parent, boolean keepHandles) + throws SQLException, AuthorizeException, XPathExpressionException { Element[] elements = new Element[collections.getLength()]; + XPath xPath = XPathFactory.newInstance().newXPath(); for (int i = 0; i < collections.getLength(); i++) { - Element element = new Element("collection"); - Collection collection = collectionService.create(context, parent); + Node tn = collections.item(i); + Node identifier = tn.getAttributes().getNamedItem("identifier"); + + // Create the Collection. + Collection collection; + if (null == identifier + || StringUtils.isBlank(identifier.getNodeValue()) + || !keepHandles) { + collection = collectionService.create(context, parent); + } else { + collection = collectionService.create(context, parent, identifier.getNodeValue()); + } // default the short description to the empty string collectionService.setMetadataSingleValue(context, collection, MD_SHORT_DESCRIPTION, Item.ANY, " "); // import the rest of the metadata - Node tn = collections.item(i); for (Map.Entry entry : collectionMap.entrySet()) { - NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey()); + NodeList nl = (NodeList) xPath.compile(entry.getKey()).evaluate(tn, XPathConstants.NODESET); if (nl.getLength() == 1) { collectionService.setMetadataSingleValue(context, collection, entry.getValue(), null, getStringValue(nl.item(0))); @@ -754,6 +815,7 @@ public class StructBuilder { collectionService.update(context, collection); + Element element = new Element("collection"); element.setAttribute("identifier", collection.getHandle()); Element nameElement = new Element("name"); diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java 
b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java index b8d41318db..9ccd53944a 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java @@ -41,10 +41,8 @@ public class MetadataDeletionScriptConfiguration ext Options options = new Options(); options.addOption("m", "metadata", true, "metadata field name"); - options.getOption("m").setType(String.class); options.addOption("l", "list", false, "lists the metadata fields that can be deleted"); - options.getOption("l").setType(boolean.class); super.options = options; } diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java index 0c513c4667..31556afc8d 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java @@ -54,12 +54,9 @@ public class MetadataExportScriptConfiguration extends Options options = new Options(); options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)"); - options.getOption("i").setType(String.class); options.addOption("a", "all", false, "include all metadata fields that are not normally changed (e.g. 
provenance)"); - options.getOption("a").setType(boolean.class); options.addOption("h", "help", false, "help"); - options.getOption("h").setType(boolean.class); super.options = options; diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearch.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearch.java new file mode 100644 index 0000000000..027ad116a7 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearch.java @@ -0,0 +1,170 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.app.bulkedit; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.UUID; + +import org.apache.commons.cli.ParseException; +import org.dspace.content.Item; +import org.dspace.content.MetadataDSpaceCsvExportServiceImpl; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.MetadataDSpaceCsvExportService; +import org.dspace.core.Context; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.IndexableObject; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchUtils; +import org.dspace.discovery.configuration.DiscoveryConfiguration; +import org.dspace.discovery.configuration.DiscoveryConfigurationService; +import org.dspace.discovery.indexobject.IndexableCollection; +import org.dspace.discovery.indexobject.IndexableCommunity; +import org.dspace.discovery.utils.DiscoverQueryBuilder; +import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; 
+import org.dspace.scripts.DSpaceRunnable; +import org.dspace.sort.SortOption; +import org.dspace.utils.DSpace; + +/** + * Metadata exporter to allow the batch export of metadata from a discovery search into a file + * + */ +public class MetadataExportSearch extends DSpaceRunnable { + private static final String EXPORT_CSV = "exportCSV"; + private boolean help = false; + private String identifier; + private String discoveryConfigName; + private String[] filterQueryStrings; + private boolean hasScope = false; + private String query; + + private SearchService searchService; + private MetadataDSpaceCsvExportService metadataDSpaceCsvExportService; + private EPersonService ePersonService; + private DiscoveryConfigurationService discoveryConfigurationService; + private CommunityService communityService; + private CollectionService collectionService; + private DiscoverQueryBuilder queryBuilder; + + @Override + public MetadataExportSearchScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager() + .getServiceByName("metadata-export-search", MetadataExportSearchScriptConfiguration.class); + } + + @Override + public void setup() throws ParseException { + searchService = SearchUtils.getSearchService(); + metadataDSpaceCsvExportService = new DSpace().getServiceManager() + .getServiceByName( + MetadataDSpaceCsvExportServiceImpl.class.getCanonicalName(), + MetadataDSpaceCsvExportService.class + ); + ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); + discoveryConfigurationService = SearchUtils.getConfigurationService(); + communityService = ContentServiceFactory.getInstance().getCommunityService(); + collectionService = ContentServiceFactory.getInstance().getCollectionService(); + queryBuilder = SearchUtils.getQueryBuilder(); + + if (commandLine.hasOption('h')) { + help = true; + return; + } + + if (commandLine.hasOption('q')) { + query = commandLine.getOptionValue('q'); + } + + if (commandLine.hasOption('s')) { + hasScope = 
true; + identifier = commandLine.getOptionValue('s'); + } + + if (commandLine.hasOption('c')) { + discoveryConfigName = commandLine.getOptionValue('c'); + } + + if (commandLine.hasOption('f')) { + filterQueryStrings = commandLine.getOptionValues('f'); + } + } + + @Override + public void internalRun() throws Exception { + if (help) { + loghelpinfo(); + printHelp(); + return; + } + handler.logDebug("starting search export"); + + IndexableObject dso = null; + Context context = new Context(); + context.setCurrentUser(ePersonService.find(context, this.getEpersonIdentifier())); + + if (hasScope) { + dso = resolveScope(context, identifier); + } + + DiscoveryConfiguration discoveryConfiguration = + discoveryConfigurationService.getDiscoveryConfiguration(discoveryConfigName); + + List queryBuilderSearchFilters = new ArrayList<>(); + + handler.logDebug("processing filter queries"); + if (filterQueryStrings != null) { + for (String filterQueryString: filterQueryStrings) { + String field = filterQueryString.split(",", 2)[0]; + String operator = filterQueryString.split("(,|=)", 3)[1]; + String value = filterQueryString.split("=", 2)[1]; + QueryBuilderSearchFilter queryBuilderSearchFilter = + new QueryBuilderSearchFilter(field, operator, value); + queryBuilderSearchFilters.add(queryBuilderSearchFilter); + } + } + handler.logDebug("building query"); + DiscoverQuery discoverQuery = + queryBuilder.buildQuery(context, dso, discoveryConfiguration, query, queryBuilderSearchFilters, + "Item", 10, 0L, null, SortOption.DESCENDING); + handler.logDebug("creating iterator"); + + Iterator itemIterator = searchService.iteratorSearch(context, dso, discoverQuery); + handler.logDebug("creating dspacecsv"); + DSpaceCSV dSpaceCSV = metadataDSpaceCsvExportService.export(context, itemIterator, true); + handler.logDebug("writing to file " + getFileNameOrExportFile()); + handler.writeFilestream(context, getFileNameOrExportFile(), dSpaceCSV.getInputStream(), EXPORT_CSV); + 
context.restoreAuthSystemState(); + context.complete(); + + } + + protected void loghelpinfo() { + handler.logInfo("metadata-export"); + } + + protected String getFileNameOrExportFile() { + return "metadataExportSearch.csv"; + } + + public IndexableObject resolveScope(Context context, String id) throws SQLException { + UUID uuid = UUID.fromString(id); + IndexableObject scopeObj = new IndexableCommunity(communityService.find(context, uuid)); + if (scopeObj.getIndexedObject() == null) { + scopeObj = new IndexableCollection(collectionService.find(context, uuid)); + } + return scopeObj; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchCli.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchCli.java new file mode 100644 index 0000000000..51ca77cbfb --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchCli.java @@ -0,0 +1,20 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.app.bulkedit; + +/** + * The cli version of the {@link MetadataExportSearch} script + */ +public class MetadataExportSearchCli extends MetadataExportSearch { + + @Override + protected String getFileNameOrExportFile() { + return commandLine.getOptionValue('n'); + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchCliScriptConfiguration.java new file mode 100644 index 0000000000..c0343f545a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchCliScriptConfiguration.java @@ -0,0 +1,26 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and 
available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.app.bulkedit; + +import org.apache.commons.cli.Options; + +/** + * This is the CLI version of the {@link MetadataExportSearchScriptConfiguration} class that handles the + * configuration for the {@link MetadataExportSearchCli} script + */ +public class MetadataExportSearchCliScriptConfiguration + extends MetadataExportSearchScriptConfiguration { + + @Override + public Options getOptions() { + Options options = super.getOptions(); + options.addOption("n", "filename", true, "the filename to export to"); + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java new file mode 100644 index 0000000000..4e350562bc --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java @@ -0,0 +1,62 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.app.bulkedit; + +import org.apache.commons.cli.Options; +import org.dspace.core.Context; +import org.dspace.scripts.configuration.ScriptConfiguration; + +/** + * The {@link ScriptConfiguration} for the {@link MetadataExportSearch} script + */ +public class MetadataExportSearchScriptConfiguration extends ScriptConfiguration { + + private Class dspaceRunnableclass; + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableclass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableclass = dspaceRunnableClass; + } + + @Override + public boolean isAllowedToExecute(Context context) { + return true; + } + + @Override + public Options getOptions() { + if (options ==
null) { + Options options = new Options(); + options.addOption("q", "query", true, + "The discovery search string to will be used to match records. Not URL encoded"); + options.getOption("q").setType(String.class); + options.addOption("s", "scope", true, + "UUID of a specific DSpace container (site, community or collection) to which the search has to be " + + "limited"); + options.getOption("s").setType(String.class); + options.addOption("c", "configuration", true, + "The name of a Discovery configuration that should be used by this search"); + options.getOption("c").setType(String.class); + options.addOption("f", "filter", true, + "Advanced search filter that has to be used to filter the result set, with syntax `<:filter-name>," + + "<:filter-operator>=<:filter-value>`. Not URL encoded. For example `author," + + "authority=5df05073-3be7-410d-8166-e254369e4166` or `title,contains=sample text`"); + options.getOption("f").setType(String.class); + options.addOption("h", "help", false, "help"); + + super.options = options; + } + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java index 469245908a..4161bbb4d8 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java @@ -598,18 +598,19 @@ public class MetadataImport extends DSpaceRunnable extends options.getOption("f").setRequired(true); options.addOption("s", "silent", false, "silent operation - doesn't request confirmation of changes USE WITH CAUTION"); - options.getOption("s").setType(boolean.class); options.addOption("w", "workflow", false, "workflow - when adding new items, use collection workflow"); - options.getOption("w").setType(boolean.class); options.addOption("n", "notify", false, "notify - when adding new items using a workflow, send notification emails"); - 
options.getOption("n").setType(boolean.class); options.addOption("v", "validate-only", false, "validate - just validate the csv, don't run the import"); - options.getOption("v").setType(boolean.class); options.addOption("t", "template", false, "template - when adding new items, use the collection template (if it exists)"); - options.getOption("t").setType(boolean.class); options.addOption("h", "help", false, "help"); - options.getOption("h").setType(boolean.class); super.options = options; } diff --git a/dspace-api/src/main/java/org/dspace/app/exception/ResourceAlreadyExistsException.java b/dspace-api/src/main/java/org/dspace/app/exception/ResourceAlreadyExistsException.java new file mode 100644 index 0000000000..8291af87fc --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/exception/ResourceAlreadyExistsException.java @@ -0,0 +1,32 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.exception; + +/** + * This class provides an exception to be used when trying to save a resource + * that already exists. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ResourceAlreadyExistsException extends RuntimeException { + + private static final long serialVersionUID = 1L; + + /** + * Create a ResourceAlreadyExistsException with a message and the already + * existing resource. 
+ * + * @param message the error message + */ + public ResourceAlreadyExistsException(String message) { + super(message); + } + + +} diff --git a/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java index 6290562143..982973e47c 100644 --- a/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java @@ -43,22 +43,14 @@ public class HarvestScriptConfiguration extends ScriptConfigu public Options getOptions() { Options options = new Options(); options.addOption("p", "purge", false, "delete all items in the collection"); - options.getOption("p").setType(boolean.class); options.addOption("r", "run", false, "run the standard harvest procedure"); - options.getOption("r").setType(boolean.class); options.addOption("g", "ping", false, "test the OAI server and set"); - options.getOption("g").setType(boolean.class); options.addOption("s", "setup", false, "Set the collection up for harvesting"); - options.getOption("s").setType(boolean.class); options.addOption("S", "start", false, "start the harvest loop"); - options.getOption("S").setType(boolean.class); options.addOption("R", "reset", false, "reset harvest status on all collections"); - options.getOption("R").setType(boolean.class); options.addOption("P", "purgeCollections", false, "purge all harvestable collections"); - options.getOption("P").setType(boolean.class); options.addOption("o", "reimport", false, "reimport all items in the collection, " + "this is equivalent to -p -r, purging all items in a collection and reimporting them"); - options.getOption("o").setType(boolean.class); options.addOption("c", "collection", true, "harvesting collection (handle or id)"); options.addOption("t", "type", true, @@ -72,7 +64,6 @@ public class HarvestScriptConfiguration extends ScriptConfigu "crosswalk in dspace.cfg"); 
options.addOption("h", "help", false, "help"); - options.getOption("h").setType(boolean.class); return options; } diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExport.java b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExport.java new file mode 100644 index 0000000000..71fc088694 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExport.java @@ -0,0 +1,264 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.itemexport; + +import java.io.File; +import java.io.FileInputStream; +import java.io.InputStream; +import java.nio.file.Path; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.UUID; + +import org.apache.commons.cli.ParseException; +import org.apache.commons.io.file.PathUtils; +import org.dspace.app.itemexport.factory.ItemExportServiceFactory; +import org.dspace.app.itemexport.service.ItemExportService; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.handle.service.HandleService; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.utils.DSpace; + +/** + * Item exporter to create simple AIPs for DSpace content. Currently exports + * individual items, or entire collections. For instructions on use, see + * printUsage() method. + *

+ * ItemExport creates the simple AIP package that the importer also uses. It + * consists of: + *

+ * /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin + * core in RDF schema / contents - text file, listing one file per line / file1 + * - files contained in the item / file2 / ... + *

+ * issues -doesn't handle special characters in metadata (needs to turn {@code &'s} into + * {@code &}, etc.) + *

+ * Modified by David Little, UCSD Libraries 12/21/04 to allow the registration + * of files (bitstreams) into DSpace. + * + * @author David Little + * @author Jay Paz + */ +public class ItemExport extends DSpaceRunnable { + + public static final String TEMP_DIR = "exportSAF"; + public static final String ZIP_NAME = "exportSAFZip"; + public static final String ZIP_FILENAME = "saf-export"; + public static final String ZIP_EXT = "zip"; + + protected String typeString = null; + protected String destDirName = null; + protected String idString = null; + protected int seqStart = -1; + protected int type = -1; + protected Item item = null; + protected Collection collection = null; + protected boolean migrate = false; + protected boolean zip = false; + protected String zipFileName = ""; + protected boolean excludeBitstreams = false; + protected boolean help = false; + + protected static HandleService handleService = HandleServiceFactory.getInstance().getHandleService(); + protected static ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); + protected static final EPersonService epersonService = + EPersonServiceFactory.getInstance().getEPersonService(); + + @Override + public ItemExportScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager() + .getServiceByName("export", ItemExportScriptConfiguration.class); + } + + @Override + public void setup() throws ParseException { + help = commandLine.hasOption('h'); + + if (commandLine.hasOption('t')) { // type + typeString = commandLine.getOptionValue('t'); + + if ("ITEM".equals(typeString)) { + type = Constants.ITEM; + } else if ("COLLECTION".equals(typeString)) { + type = Constants.COLLECTION; + } + } + + if (commandLine.hasOption('i')) { // id + idString = commandLine.getOptionValue('i'); + } + + setNumber(); + + if (commandLine.hasOption('m')) { // 
number + migrate = true; + } + + if (commandLine.hasOption('x')) { + excludeBitstreams = true; + } + } + + @Override + public void internalRun() throws Exception { + if (help) { + printHelp(); + return; + } + + validate(); + + Context context = new Context(); + context.turnOffAuthorisationSystem(); + + if (type == Constants.ITEM) { + // first, is myIDString a handle? + if (idString.indexOf('/') != -1) { + item = (Item) handleService.resolveToObject(context, idString); + + if ((item == null) || (item.getType() != Constants.ITEM)) { + item = null; + } + } else { + item = itemService.find(context, UUID.fromString(idString)); + } + + if (item == null) { + handler.logError("The item cannot be found: " + idString + " (run with -h flag for details)"); + throw new UnsupportedOperationException("The item cannot be found: " + idString); + } + } else { + if (idString.indexOf('/') != -1) { + // has a / must be a handle + collection = (Collection) handleService.resolveToObject(context, + idString); + + // ensure it's a collection + if ((collection == null) + || (collection.getType() != Constants.COLLECTION)) { + collection = null; + } + } else { + collection = collectionService.find(context, UUID.fromString(idString)); + } + + if (collection == null) { + handler.logError("The collection cannot be found: " + idString + " (run with -h flag for details)"); + throw new UnsupportedOperationException("The collection cannot be found: " + idString); + } + } + + ItemExportService itemExportService = ItemExportServiceFactory.getInstance() + .getItemExportService(); + try { + itemExportService.setHandler(handler); + process(context, itemExportService); + context.complete(); + } catch (Exception e) { + context.abort(); + throw new Exception(e); + } + } + + /** + * Validate the options + */ + protected void validate() { + if (type == -1) { + handler.logError("The type must be either COLLECTION or ITEM (run with -h flag for details)"); + throw new UnsupportedOperationException("The type must 
be either COLLECTION or ITEM"); + } + + if (idString == null) { + handler.logError("The ID must be set to either a database ID or a handle (run with -h flag for details)"); + throw new UnsupportedOperationException("The ID must be set to either a database ID or a handle"); + } + } + + /** + * Process the export + * @param context + * @throws Exception + */ + protected void process(Context context, ItemExportService itemExportService) throws Exception { + setEPerson(context); + setDestDirName(context, itemExportService); + setZip(context); + + Iterator items; + if (item != null) { + List myItems = new ArrayList<>(); + myItems.add(item); + items = myItems.iterator(); + } else { + handler.logInfo("Exporting from collection: " + idString); + items = itemService.findByCollection(context, collection); + } + itemExportService.exportAsZip(context, items, destDirName, zipFileName, + seqStart, migrate, excludeBitstreams); + + File zip = new File(destDirName + System.getProperty("file.separator") + zipFileName); + try (InputStream is = new FileInputStream(zip)) { + // write input stream on handler + handler.writeFilestream(context, ZIP_FILENAME + "." + ZIP_EXT, is, ZIP_NAME); + } finally { + PathUtils.deleteDirectory(Path.of(destDirName)); + } + } + + /** + * Set the destination directory option + */ + protected void setDestDirName(Context context, ItemExportService itemExportService) throws Exception { + destDirName = itemExportService.getExportWorkDirectory() + File.separator + TEMP_DIR; + } + + /** + * Set the zip option + */ + protected void setZip(Context context) { + zip = true; + zipFileName = ZIP_FILENAME + "-" + context.getCurrentUser().getID() + "." 
+ ZIP_EXT; + } + + /** + * Set the number option + */ + protected void setNumber() { + seqStart = 1; + if (commandLine.hasOption('n')) { // number + seqStart = Integer.parseInt(commandLine.getOptionValue('n')); + } + } + + private void setEPerson(Context context) throws SQLException { + EPerson myEPerson = epersonService.find(context, this.getEpersonIdentifier()); + + // check eperson + if (myEPerson == null) { + handler.logError("EPerson cannot be found: " + this.getEpersonIdentifier()); + throw new UnsupportedOperationException("EPerson cannot be found: " + this.getEpersonIdentifier()); + } + + context.setCurrentUser(myEPerson); + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLI.java b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLI.java new file mode 100644 index 0000000000..8e9af1e010 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLI.java @@ -0,0 +1,96 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.itemexport; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; + +import org.dspace.app.itemexport.service.ItemExportService; +import org.dspace.content.Item; +import org.dspace.core.Context; + +/** + * CLI variant for the {@link ItemExport} class. + * This was done to specify the specific behaviors for the CLI. 
+ * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemExportCLI extends ItemExport { + + @Override + protected void validate() { + super.validate(); + + setDestDirName(); + + if (destDirName == null) { + handler.logError("The destination directory must be set (run with -h flag for details)"); + throw new UnsupportedOperationException("The destination directory must be set"); + } + + if (seqStart == -1) { + handler.logError("The sequence start number must be set (run with -h flag for details)"); + throw new UnsupportedOperationException("The sequence start number must be set"); + } + } + + @Override + protected void process(Context context, ItemExportService itemExportService) throws Exception { + setZip(context); + + if (zip) { + Iterator items; + if (item != null) { + List myItems = new ArrayList<>(); + myItems.add(item); + items = myItems.iterator(); + } else { + handler.logInfo("Exporting from collection: " + idString); + items = itemService.findByCollection(context, collection); + } + itemExportService.exportAsZip(context, items, destDirName, zipFileName, + seqStart, migrate, excludeBitstreams); + } else { + if (item != null) { + // it's only a single item + itemExportService + .exportItem(context, Collections.singletonList(item).iterator(), destDirName, + seqStart, migrate, excludeBitstreams); + } else { + handler.logInfo("Exporting from collection: " + idString); + + // it's a collection, so do a bunch of items + Iterator i = itemService.findByCollection(context, collection); + itemExportService.exportItem(context, i, destDirName, seqStart, migrate, excludeBitstreams); + } + } + } + + protected void setDestDirName() { + if (commandLine.hasOption('d')) { // dest + destDirName = commandLine.getOptionValue('d'); + } + } + + @Override + protected void setZip(Context context) { + if (commandLine.hasOption('z')) { + zip = true; + zipFileName = commandLine.getOptionValue('z'); + } + } + + @Override + protected 
void setNumber() { + if (commandLine.hasOption('n')) { // number + seqStart = Integer.parseInt(commandLine.getOptionValue('n')); + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLIScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLIScriptConfiguration.java new file mode 100644 index 0000000000..ff79c7cfa7 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLIScriptConfiguration.java @@ -0,0 +1,56 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.itemexport; + +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; +import org.dspace.scripts.configuration.ScriptConfiguration; + +/** + * The {@link ScriptConfiguration} for the {@link ItemExportCLI} script + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemExportCLIScriptConfiguration extends ItemExportScriptConfiguration { + + @Override + public Options getOptions() { + Options options = new Options(); + + options.addOption(Option.builder("t").longOpt("type") + .desc("type: COLLECTION or ITEM") + .hasArg().required().build()); + options.addOption(Option.builder("i").longOpt("id") + .desc("ID or handle of thing to export") + .hasArg().required().build()); + options.addOption(Option.builder("d").longOpt("dest") + .desc("destination where you want items to go") + .hasArg().required().build()); + options.addOption(Option.builder("n").longOpt("number") + .desc("sequence number to begin exporting items with") + .hasArg().required().build()); + options.addOption(Option.builder("z").longOpt("zip") + .desc("export as zip file (specify filename e.g. 
export.zip)") + .hasArg().required(false).build()); + options.addOption(Option.builder("m").longOpt("migrate") + .desc("export for migration (remove handle and metadata that will be re-created in new system)") + .hasArg(false).required(false).build()); + + // as pointed out by Peter Dietz this provides similar functionality to export metadata + // but it is needed since it directly exports to Simple Archive Format (SAF) + options.addOption(Option.builder("x").longOpt("exclude-bitstreams") + .desc("do not export bitstreams") + .hasArg(false).required(false).build()); + + options.addOption(Option.builder("h").longOpt("help") + .desc("help") + .hasArg(false).required(false).build()); + + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLITool.java b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLITool.java deleted file mode 100644 index d6a69b5823..0000000000 --- a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLITool.java +++ /dev/null @@ -1,246 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.itemexport; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.UUID; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.DefaultParser; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Options; -import org.dspace.app.itemexport.factory.ItemExportServiceFactory; -import org.dspace.app.itemexport.service.ItemExportService; -import org.dspace.content.Collection; -import org.dspace.content.Item; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.CollectionService; -import 
org.dspace.content.service.ItemService; -import org.dspace.core.Constants; -import org.dspace.core.Context; -import org.dspace.handle.factory.HandleServiceFactory; -import org.dspace.handle.service.HandleService; - -/** - * Item exporter to create simple AIPs for DSpace content. Currently exports - * individual items, or entire collections. For instructions on use, see - * printUsage() method. - *

- * ItemExport creates the simple AIP package that the importer also uses. It - * consists of: - *

- * /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin - * core in RDF schema / contents - text file, listing one file per line / file1 - * - files contained in the item / file2 / ... - *

- * issues -doesn't handle special characters in metadata (needs to turn {@code &'s} into - * {@code &}, etc.) - *

- * Modified by David Little, UCSD Libraries 12/21/04 to allow the registration - * of files (bitstreams) into DSpace. - * - * @author David Little - * @author Jay Paz - */ -public class ItemExportCLITool { - - protected static ItemExportService itemExportService = ItemExportServiceFactory.getInstance() - .getItemExportService(); - protected static HandleService handleService = HandleServiceFactory.getInstance().getHandleService(); - protected static ItemService itemService = ContentServiceFactory.getInstance().getItemService(); - protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); - - /** - * Default constructor - */ - private ItemExportCLITool() { } - - /* - * - */ - public static void main(String[] argv) throws Exception { - // create an options object and populate it - CommandLineParser parser = new DefaultParser(); - - Options options = new Options(); - - options.addOption("t", "type", true, "type: COLLECTION or ITEM"); - options.addOption("i", "id", true, "ID or handle of thing to export"); - options.addOption("d", "dest", true, - "destination where you want items to go"); - options.addOption("m", "migrate", false, - "export for migration (remove handle and metadata that will be re-created in new system)"); - options.addOption("n", "number", true, - "sequence number to begin exporting items with"); - options.addOption("z", "zip", true, "export as zip file (specify filename e.g. 
export.zip)"); - options.addOption("h", "help", false, "help"); - - // as pointed out by Peter Dietz this provides similar functionality to export metadata - // but it is needed since it directly exports to Simple Archive Format (SAF) - options.addOption("x", "exclude-bitstreams", false, "do not export bitstreams"); - - CommandLine line = parser.parse(options, argv); - - String typeString = null; - String destDirName = null; - String myIDString = null; - int seqStart = -1; - int myType = -1; - - Item myItem = null; - Collection mycollection = null; - - if (line.hasOption('h')) { - HelpFormatter myhelp = new HelpFormatter(); - myhelp.printHelp("ItemExport\n", options); - System.out - .println("\nfull collection: ItemExport -t COLLECTION -i ID -d dest -n number"); - System.out - .println("singleitem: ItemExport -t ITEM -i ID -d dest -n number"); - - System.exit(0); - } - - if (line.hasOption('t')) { // type - typeString = line.getOptionValue('t'); - - if ("ITEM".equals(typeString)) { - myType = Constants.ITEM; - } else if ("COLLECTION".equals(typeString)) { - myType = Constants.COLLECTION; - } - } - - if (line.hasOption('i')) { // id - myIDString = line.getOptionValue('i'); - } - - if (line.hasOption('d')) { // dest - destDirName = line.getOptionValue('d'); - } - - if (line.hasOption('n')) { // number - seqStart = Integer.parseInt(line.getOptionValue('n')); - } - - boolean migrate = false; - if (line.hasOption('m')) { // number - migrate = true; - } - - boolean zip = false; - String zipFileName = ""; - if (line.hasOption('z')) { - zip = true; - zipFileName = line.getOptionValue('z'); - } - - boolean excludeBitstreams = false; - if (line.hasOption('x')) { - excludeBitstreams = true; - } - - // now validate the args - if (myType == -1) { - System.out - .println("type must be either COLLECTION or ITEM (-h for help)"); - System.exit(1); - } - - if (destDirName == null) { - System.out - .println("destination directory must be set (-h for help)"); - System.exit(1); - } - - 
if (seqStart == -1) { - System.out - .println("sequence start number must be set (-h for help)"); - System.exit(1); - } - - if (myIDString == null) { - System.out - .println("ID must be set to either a database ID or a handle (-h for help)"); - System.exit(1); - } - - Context c = new Context(Context.Mode.READ_ONLY); - c.turnOffAuthorisationSystem(); - - if (myType == Constants.ITEM) { - // first, is myIDString a handle? - if (myIDString.indexOf('/') != -1) { - myItem = (Item) handleService.resolveToObject(c, myIDString); - - if ((myItem == null) || (myItem.getType() != Constants.ITEM)) { - myItem = null; - } - } else { - myItem = itemService.find(c, UUID.fromString(myIDString)); - } - - if (myItem == null) { - System.out - .println("Error, item cannot be found: " + myIDString); - } - } else { - if (myIDString.indexOf('/') != -1) { - // has a / must be a handle - mycollection = (Collection) handleService.resolveToObject(c, - myIDString); - - // ensure it's a collection - if ((mycollection == null) - || (mycollection.getType() != Constants.COLLECTION)) { - mycollection = null; - } - } else if (myIDString != null) { - mycollection = collectionService.find(c, UUID.fromString(myIDString)); - } - - if (mycollection == null) { - System.out.println("Error, collection cannot be found: " - + myIDString); - System.exit(1); - } - } - - if (zip) { - Iterator items; - if (myItem != null) { - List myItems = new ArrayList<>(); - myItems.add(myItem); - items = myItems.iterator(); - } else { - System.out.println("Exporting from collection: " + myIDString); - items = itemService.findByCollection(c, mycollection); - } - itemExportService.exportAsZip(c, items, destDirName, zipFileName, seqStart, migrate, excludeBitstreams); - } else { - if (myItem != null) { - // it's only a single item - itemExportService - .exportItem(c, Collections.singletonList(myItem).iterator(), destDirName, seqStart, migrate, - excludeBitstreams); - } else { - System.out.println("Exporting from collection: " + 
myIDString); - - // it's a collection, so do a bunch of items - Iterator i = itemService.findByCollection(c, mycollection); - itemExportService.exportItem(c, i, destDirName, seqStart, migrate, excludeBitstreams); - } - } - - c.complete(); - } -} diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java new file mode 100644 index 0000000000..cf70120d27 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java @@ -0,0 +1,79 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.itemexport; + +import java.sql.SQLException; + +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.core.Context; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * The {@link ScriptConfiguration} for the {@link ItemExport} script + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemExportScriptConfiguration extends ScriptConfiguration { + + @Autowired + private AuthorizeService authorizeService; + + private Class dspaceRunnableClass; + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public boolean isAllowedToExecute(final Context context) { + try { + return authorizeService.isAdmin(context); + } catch (SQLException e) { + throw new RuntimeException("SQLException occurred when checking if the current 
user is an admin", e); + } + } + + @Override + public Options getOptions() { + Options options = new Options(); + + options.addOption(Option.builder("t").longOpt("type") + .desc("type: COLLECTION or ITEM") + .hasArg().required().build()); + options.addOption(Option.builder("i").longOpt("id") + .desc("ID or handle of thing to export") + .hasArg().required().build()); + options.addOption(Option.builder("n").longOpt("number") + .desc("sequence number to begin exporting items with") + .hasArg().required(false).build()); + options.addOption(Option.builder("m").longOpt("migrate") + .desc("export for migration (remove handle and metadata that will be re-created in new system)") + .hasArg(false).required(false).build()); + + // as pointed out by Peter Dietz this provides similar functionality to export metadata + // but it is needed since it directly exports to Simple Archive Format (SAF) + options.addOption(Option.builder("x").longOpt("exclude-bitstreams") + .desc("do not export bitstreams") + .hasArg(false).required(false).build()); + + options.addOption(Option.builder("h").longOpt("help") + .desc("help") + .hasArg(false).required(false).build()); + + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportServiceImpl.java index 6578e57de2..a884f9b075 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportServiceImpl.java @@ -57,6 +57,7 @@ import org.dspace.core.Utils; import org.dspace.eperson.EPerson; import org.dspace.eperson.service.EPersonService; import org.dspace.handle.service.HandleService; +import org.dspace.scripts.handler.DSpaceRunnableHandler; import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; @@ -64,17 +65,21 @@ import org.springframework.beans.factory.annotation.Autowired; * 
Item exporter to create simple AIPs for DSpace content. Currently exports * individual items, or entire collections. For instructions on use, see * printUsage() method. - *

+ *

* ItemExport creates the simple AIP package that the importer also uses. It * consists of: - *

- * /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin - * core in RDF schema / contents - text file, listing one file per line / file1 - * - files contained in the item / file2 / ... - *

+ *

{@code
+ * /exportdir/42/ (one directory per item)
+ *              / dublin_core.xml - qualified dublin core in RDF schema
+ *              / contents - text file, listing one file per line
+ *              / file1 - files contained in the item
+ *              / file2
+ *              / ...
+ * }
+ *

* issues -doesn't handle special characters in metadata (needs to turn {@code &'s} into * {@code &}, etc.) - *

+ *

* Modified by David Little, UCSD Libraries 12/21/04 to allow the registration * of files (bitstreams) into DSpace. * @@ -97,11 +102,12 @@ public class ItemExportServiceImpl implements ItemExportService { @Autowired(required = true) protected ConfigurationService configurationService; - /** * log4j logger */ - private final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemExportServiceImpl.class); + private final Logger log = org.apache.logging.log4j.LogManager.getLogger(); + + private DSpaceRunnableHandler handler; protected ItemExportServiceImpl() { @@ -126,7 +132,7 @@ public class ItemExportServiceImpl implements ItemExportService { } } - System.out.println("Beginning export"); + logInfo("Beginning export"); while (i.hasNext()) { if (SUBDIR_LIMIT > 0 && ++counter == SUBDIR_LIMIT) { @@ -139,7 +145,7 @@ public class ItemExportServiceImpl implements ItemExportService { } } - System.out.println("Exporting item to " + mySequenceNumber); + logInfo("Exporting item to " + mySequenceNumber); Item item = i.next(); exportItem(c, item, fullPath, mySequenceNumber, migrate, excludeBitstreams); c.uncacheEntity(item); @@ -155,7 +161,7 @@ public class ItemExportServiceImpl implements ItemExportService { // now create a subdirectory File itemDir = new File(destDir + "/" + seqStart); - System.out.println("Exporting Item " + myItem.getID() + + logInfo("Exporting Item " + myItem.getID() + (myItem.getHandle() != null ? 
", handle " + myItem.getHandle() : "") + " to " + itemDir); @@ -168,6 +174,7 @@ public class ItemExportServiceImpl implements ItemExportService { // make it this far, now start exporting writeMetadata(c, myItem, itemDir, migrate); writeBitstreams(c, myItem, itemDir, excludeBitstreams); + writeCollections(myItem, itemDir); if (!migrate) { writeHandle(c, myItem, itemDir); } @@ -225,7 +232,7 @@ public class ItemExportServiceImpl implements ItemExportService { File outFile = new File(destDir, filename); - System.out.println("Attempting to create file " + outFile); + logInfo("Attempting to create file " + outFile); if (outFile.createNewFile()) { BufferedOutputStream out = new BufferedOutputStream( @@ -343,6 +350,33 @@ public class ItemExportServiceImpl implements ItemExportService { } } + /** + * Create the 'collections' file. List handles of all Collections which + * contain this Item. The "owning" Collection is listed first. + * + * @param item list collections holding this Item. + * @param destDir write the file here. + * @throws IOException if the file cannot be created or written. + */ + protected void writeCollections(Item item, File destDir) + throws IOException { + File outFile = new File(destDir, "collections"); + if (outFile.createNewFile()) { + try (PrintWriter out = new PrintWriter(new FileWriter(outFile))) { + String ownerHandle = item.getOwningCollection().getHandle(); + out.println(ownerHandle); + for (Collection collection : item.getCollections()) { + String collectionHandle = collection.getHandle(); + if (!collectionHandle.equals(ownerHandle)) { + out.println(collectionHandle); + } + } + } + } else { + throw new IOException("Cannot create 'collections' in " + destDir); + } + } + /** * Create both the bitstreams and the contents file. Any bitstreams that * were originally registered will be marked in the contents file as such. 
@@ -399,7 +433,7 @@ public class ItemExportServiceImpl implements ItemExportService { File fdirs = new File(destDir + File.separator + dirs); if (!fdirs.exists() && !fdirs.mkdirs()) { - log.error("Unable to create destination directory"); + logError("Unable to create destination directory"); } } @@ -456,12 +490,12 @@ public class ItemExportServiceImpl implements ItemExportService { File wkDir = new File(workDir); if (!wkDir.exists() && !wkDir.mkdirs()) { - log.error("Unable to create working direcory"); + logError("Unable to create working directory"); } File dnDir = new File(destDirName); if (!dnDir.exists() && !dnDir.mkdirs()) { - log.error("Unable to create destination directory"); + logError("Unable to create destination directory"); } // export the items using normal export method @@ -630,11 +664,9 @@ public class ItemExportServiceImpl implements ItemExportService { Thread go = new Thread() { @Override public void run() { - Context context = null; + Context context = new Context(); Iterator iitems = null; try { - // create a new dspace context - context = new Context(); // ignore auths context.turnOffAuthorisationSystem(); @@ -646,7 +678,7 @@ public class ItemExportServiceImpl implements ItemExportService { String downloadDir = getExportDownloadDirectory(eperson); File dnDir = new File(downloadDir); if (!dnDir.exists() && !dnDir.mkdirs()) { - log.error("Unable to create download directory"); + logError("Unable to create download directory"); } Iterator iter = itemsMap.keySet().iterator(); @@ -665,7 +697,7 @@ public class ItemExportServiceImpl implements ItemExportService { File wkDir = new File(workDir); if (!wkDir.exists() && !wkDir.mkdirs()) { - log.error("Unable to create working directory"); + logError("Unable to create working directory"); } @@ -756,7 +788,8 @@ public class ItemExportServiceImpl implements ItemExportService { throw new Exception( "A dspace.cfg entry for 'org.dspace.app.itemexport.work.dir' does not exist."); } - return exportDir; + // 
clean work dir path from duplicate separators + return StringUtils.replace(exportDir, File.separator + File.separator, File.separator); } @Override @@ -884,7 +917,7 @@ public class ItemExportServiceImpl implements ItemExportService { for (File file : files) { if (file.lastModified() < now.getTimeInMillis()) { if (!file.delete()) { - log.error("Unable to delete export file"); + logError("Unable to delete export file"); } } } @@ -908,7 +941,7 @@ public class ItemExportServiceImpl implements ItemExportService { for (File file : files) { if (file.lastModified() < now.getTimeInMillis()) { if (!file.delete()) { - log.error("Unable to delete old files"); + logError("Unable to delete old files"); } } } @@ -916,7 +949,7 @@ public class ItemExportServiceImpl implements ItemExportService { // If the directory is now empty then we delete it too. if (dir.listFiles().length == 0) { if (!dir.delete()) { - log.error("Unable to delete directory"); + logError("Unable to delete directory"); } } } @@ -937,14 +970,14 @@ public class ItemExportServiceImpl implements ItemExportService { email.send(); } catch (Exception e) { - log.warn(LogHelper.getHeader(context, "emailSuccessMessage", "cannot notify user of export"), e); + logWarn(LogHelper.getHeader(context, "emailSuccessMessage", "cannot notify user of export"), e); } } @Override public void emailErrorMessage(EPerson eperson, String error) throws MessagingException { - log.warn("An error occurred during item export, the user will be notified. " + error); + logWarn("An error occurred during item export, the user will be notified. 
" + error); try { Locale supportedLocale = I18nUtil.getEPersonLocale(eperson); Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "export_error")); @@ -954,7 +987,7 @@ public class ItemExportServiceImpl implements ItemExportService { email.send(); } catch (Exception e) { - log.warn("error during item export error notification", e); + logWarn("error during item export error notification", e); } } @@ -969,7 +1002,7 @@ public class ItemExportServiceImpl implements ItemExportService { } File targetFile = new File(tempFileName); if (!targetFile.createNewFile()) { - log.warn("Target file already exists: " + targetFile.getName()); + logWarn("Target file already exists: " + targetFile.getName()); } FileOutputStream fos = new FileOutputStream(tempFileName); @@ -985,7 +1018,7 @@ public class ItemExportServiceImpl implements ItemExportService { deleteDirectory(cpFile); if (!targetFile.renameTo(new File(target))) { - log.error("Unable to rename file"); + logError("Unable to rename file"); } } finally { if (cpZipOutputStream != null) { @@ -1018,8 +1051,11 @@ public class ItemExportServiceImpl implements ItemExportService { return; } String strAbsPath = cpFile.getPath(); - String strZipEntryName = strAbsPath.substring(strSource - .length() + 1, strAbsPath.length()); + int startIndex = strSource.length(); + if (!StringUtils.endsWith(strSource, File.separator)) { + startIndex++; + } + String strZipEntryName = strAbsPath.substring(startIndex, strAbsPath.length()); // byte[] b = new byte[ (int)(cpFile.length()) ]; @@ -1058,7 +1094,7 @@ public class ItemExportServiceImpl implements ItemExportService { deleteDirectory(file); } else { if (!file.delete()) { - log.error("Unable to delete file: " + file.getName()); + logError("Unable to delete file: " + file.getName()); } } } @@ -1067,4 +1103,64 @@ public class ItemExportServiceImpl implements ItemExportService { return (path.delete()); } + @Override + public void setHandler(DSpaceRunnableHandler handler) { + 
this.handler = handler; + } + + private void logInfo(String message) { + logInfo(message, null); + } + + private void logInfo(String message, Exception e) { + if (handler != null) { + handler.logInfo(message); + return; + } + + if (e != null) { + log.info(message, e); + } else { + log.info(message); + } + } + + private void logWarn(String message) { + logWarn(message, null); + } + + private void logWarn(String message, Exception e) { + if (handler != null) { + handler.logWarning(message); + return; + } + + if (e != null) { + log.warn(message, e); + } else { + log.warn(message); + } + } + + private void logError(String message) { + logError(message, null); + } + + private void logError(String message, Exception e) { + if (handler != null) { + if (e != null) { + handler.logError(message, e); + } else { + handler.logError(message); + } + return; + } + + if (e != null) { + log.error(message, e); + } else { + log.error(message); + } + } + } diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/service/ItemExportService.java b/dspace-api/src/main/java/org/dspace/app/itemexport/service/ItemExportService.java index 7dedc9950b..6ec1027709 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemexport/service/ItemExportService.java +++ b/dspace-api/src/main/java/org/dspace/app/itemexport/service/ItemExportService.java @@ -17,6 +17,7 @@ import org.dspace.content.DSpaceObject; import org.dspace.content.Item; import org.dspace.core.Context; import org.dspace.eperson.EPerson; +import org.dspace.scripts.handler.DSpaceRunnableHandler; /** * Item exporter to create simple AIPs for DSpace content. 
Currently exports @@ -267,4 +268,10 @@ public interface ItemExportService { */ public void zip(String strSource, String target) throws Exception; + /** + * Set the DSpace Runnable Handler + * @param handler + */ + public void setHandler(DSpaceRunnableHandler handler); + } diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java new file mode 100644 index 0000000000..6870b94eee --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java @@ -0,0 +1,378 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.itemimport; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Optional; +import java.util.UUID; + +import org.apache.commons.cli.ParseException; +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.StringUtils; +import org.dspace.app.itemimport.factory.ItemImportServiceFactory; +import org.dspace.app.itemimport.service.ItemImportService; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Collection; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.handle.service.HandleService; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.utils.DSpace; 
+ +/** + * Import items into DSpace. The conventional use is upload files by copying + * them. DSpace writes the item's bitstreams into its assetstore. Metadata is + * also loaded to the DSpace database. + *

+ * A second use assumes the bitstream files already exist in a storage + * resource accessible to DSpace. In this case the bitstreams are 'registered'. + * That is, the metadata is loaded to the DSpace database and DSpace is given + * the location of the file which is subsumed into DSpace. + *

+ * The distinction is controlled by the format of lines in the 'contents' file. + * See comments in processContentsFile() below. + *

+ * Modified by David Little, UCSD Libraries 12/21/04 to + * allow the registration of files (bitstreams) into DSpace. + */ +public class ItemImport extends DSpaceRunnable { + + public static String TEMP_DIR = "importSAF"; + public static String MAPFILE_FILENAME = "mapfile"; + public static String MAPFILE_BITSTREAM_TYPE = "importSAFMapfile"; + + protected boolean template = false; + protected String command = null; + protected String sourcedir = null; + protected String mapfile = null; + protected String eperson = null; + protected String[] collections = null; + protected boolean isTest = false; + protected boolean isExcludeContent = false; + protected boolean isResume = false; + protected boolean useWorkflow = false; + protected boolean useWorkflowSendEmail = false; + protected boolean isQuiet = false; + protected boolean commandLineCollections = false; + protected boolean zip = false; + protected String zipfilename = null; + protected boolean help = false; + protected File workDir = null; + private File workFile = null; + + protected static final CollectionService collectionService = + ContentServiceFactory.getInstance().getCollectionService(); + protected static final EPersonService epersonService = + EPersonServiceFactory.getInstance().getEPersonService(); + protected static final HandleService handleService = + HandleServiceFactory.getInstance().getHandleService(); + + @Override + public ItemImportScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager() + .getServiceByName("import", ItemImportScriptConfiguration.class); + } + + @Override + public void setup() throws ParseException { + help = commandLine.hasOption('h'); + + if (commandLine.hasOption('a')) { + command = "add"; + } + + if (commandLine.hasOption('r')) { + command = "replace"; + } + + if (commandLine.hasOption('d')) { + command = "delete"; + } + + if (commandLine.hasOption('w')) { + useWorkflow = true; + if (commandLine.hasOption('n')) { + useWorkflowSendEmail = 
true; + } + } + + if (commandLine.hasOption('v')) { + isTest = true; + handler.logInfo("**Test Run** - not actually importing items."); + } + + isExcludeContent = commandLine.hasOption('x'); + + if (commandLine.hasOption('p')) { + template = true; + } + + if (commandLine.hasOption('c')) { // collections + collections = commandLine.getOptionValues('c'); + commandLineCollections = true; + } else { + handler.logInfo("No collections given. Assuming 'collections' file inside item directory"); + } + + if (commandLine.hasOption('R')) { + isResume = true; + handler.logInfo("**Resume import** - attempting to import items not already imported"); + } + + if (commandLine.hasOption('q')) { + isQuiet = true; + } + + setZip(); + } + + @Override + public void internalRun() throws Exception { + if (help) { + printHelp(); + return; + } + + Date startTime = new Date(); + Context context = new Context(Context.Mode.BATCH_EDIT); + + setMapFile(); + + validate(context); + + setEPerson(context); + + // check collection + List mycollections = null; + // don't need to validate collections set if command is "delete" + // also if no collections are given in the command line + if (!"delete".equals(command) && commandLineCollections) { + handler.logInfo("Destination collections:"); + + mycollections = new ArrayList<>(); + + // validate each collection arg to see if it's a real collection + for (int i = 0; i < collections.length; i++) { + Collection collection = null; + if (collections[i] != null) { + // is the ID a handle? + if (collections[i].indexOf('/') != -1) { + // string has a / so it must be a handle - try and resolve + // it + collection = ((Collection) handleService + .resolveToObject(context, collections[i])); + } else { + // not a handle, try and treat it as an integer collection database ID + collection = collectionService.find(context, UUID.fromString(collections[i])); + } + } + + // was the collection valid? 
+ if (collection == null + || collection.getType() != Constants.COLLECTION) { + throw new IllegalArgumentException("Cannot resolve " + + collections[i] + " to collection"); + } + + // add resolved collection to list + mycollections.add(collection); + + // print progress info + handler.logInfo((i == 0 ? "Owning " : "") + "Collection: " + collection.getName()); + } + } + // end validation + + // start + ItemImportService itemImportService = ItemImportServiceFactory.getInstance() + .getItemImportService(); + try { + itemImportService.setTest(isTest); + itemImportService.setExcludeContent(isExcludeContent); + itemImportService.setResume(isResume); + itemImportService.setUseWorkflow(useWorkflow); + itemImportService.setUseWorkflowSendEmail(useWorkflowSendEmail); + itemImportService.setQuiet(isQuiet); + itemImportService.setHandler(handler); + + try { + context.turnOffAuthorisationSystem(); + + readZip(context, itemImportService); + + process(context, itemImportService, mycollections); + + // complete all transactions + context.complete(); + } catch (Exception e) { + context.abort(); + throw new Exception( + "Error committing changes to database: " + e.getMessage() + ", aborting most recent changes", e); + } + + if (isTest) { + handler.logInfo("***End of Test Run***"); + } + } finally { + // clean work dir + if (zip) { + FileUtils.deleteDirectory(new File(sourcedir)); + FileUtils.deleteDirectory(workDir); + } + + Date endTime = new Date(); + handler.logInfo("Started: " + startTime.getTime()); + handler.logInfo("Ended: " + endTime.getTime()); + handler.logInfo( + "Elapsed time: " + ((endTime.getTime() - startTime.getTime()) / 1000) + " secs (" + (endTime + .getTime() - startTime.getTime()) + " msecs)"); + } + } + + /** + * Validate the options + * @param context + */ + protected void validate(Context context) { + if (command == null) { + handler.logError("Must run with either add, replace, or remove (run with -h flag for details)"); + throw new 
UnsupportedOperationException("Must run with either add, replace, or remove"); + } + + // can only resume for adds + if (isResume && !"add".equals(command)) { + handler.logError("Resume option only works with the --add command (run with -h flag for details)"); + throw new UnsupportedOperationException("Resume option only works with the --add command"); + } + + if (isResume && StringUtils.isBlank(mapfile)) { + handler.logError("The mapfile does not exist. "); + throw new UnsupportedOperationException("The mapfile does not exist"); + } + } + + /** + * Process the import + * @param context + * @param itemImportService + * @param collections + * @throws Exception + */ + protected void process(Context context, ItemImportService itemImportService, + List collections) throws Exception { + readMapfile(context); + + if ("add".equals(command)) { + itemImportService.addItems(context, collections, sourcedir, mapfile, template); + } else if ("replace".equals(command)) { + itemImportService.replaceItems(context, collections, sourcedir, mapfile, template); + } else if ("delete".equals(command)) { + itemImportService.deleteItems(context, mapfile); + } + + // write input stream on handler + File mapFile = new File(mapfile); + try (InputStream mapfileInputStream = new FileInputStream(mapFile)) { + handler.writeFilestream(context, MAPFILE_FILENAME, mapfileInputStream, MAPFILE_BITSTREAM_TYPE); + } finally { + mapFile.delete(); + workFile.delete(); + } + } + + /** + * Read the ZIP archive in SAF format + * @param context + * @param itemImportService + * @throws Exception + */ + protected void readZip(Context context, ItemImportService itemImportService) throws Exception { + Optional optionalFileStream = handler.getFileStream(context, zipfilename); + if (optionalFileStream.isPresent()) { + workFile = new File(itemImportService.getTempWorkDir() + File.separator + + zipfilename + "-" + context.getCurrentUser().getID()); + FileUtils.copyInputStreamToFile(optionalFileStream.get(), 
workFile); + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR); + sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); + } else { + throw new IllegalArgumentException( + "Error reading file, the file couldn't be found for filename: " + zipfilename); + } + } + + /** + * Read the mapfile + * @param context + */ + protected void readMapfile(Context context) { + if (isResume) { + try { + Optional optionalFileStream = handler.getFileStream(context, mapfile); + if (optionalFileStream.isPresent()) { + File tempFile = File.createTempFile(mapfile, "temp"); + tempFile.deleteOnExit(); + FileUtils.copyInputStreamToFile(optionalFileStream.get(), tempFile); + mapfile = tempFile.getAbsolutePath(); + } + } catch (IOException | AuthorizeException e) { + throw new UnsupportedOperationException("The mapfile does not exist"); + } + } + } + + /** + * Set the mapfile option + * @throws IOException + */ + protected void setMapFile() throws IOException { + if (isResume && commandLine.hasOption('m')) { + mapfile = commandLine.getOptionValue('m'); + } else { + mapfile = Files.createTempFile(MAPFILE_FILENAME, "temp").toString(); + } + } + + /** + * Set the zip option + */ + protected void setZip() { + zip = true; + zipfilename = commandLine.getOptionValue('z'); + } + + /** + * Set the eperson in the context + * @param context + * @throws SQLException + */ + protected void setEPerson(Context context) throws SQLException { + EPerson myEPerson = epersonService.find(context, this.getEpersonIdentifier()); + + // check eperson + if (myEPerson == null) { + handler.logError("EPerson cannot be found: " + this.getEpersonIdentifier()); + throw new UnsupportedOperationException("EPerson cannot be found: " + this.getEpersonIdentifier()); + } + + context.setCurrentUser(myEPerson); + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java new file 
mode 100644 index 0000000000..35de7b443a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java @@ -0,0 +1,143 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.itemimport; + +import java.io.File; +import java.sql.SQLException; +import java.util.List; +import java.util.UUID; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.app.itemimport.service.ItemImportService; +import org.dspace.content.Collection; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; + +/** + * CLI variant for the {@link ItemImport} class. + * This was done to specify the specific behaviors for the CLI. + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemImportCLI extends ItemImport { + + @Override + protected void validate(Context context) { + // can only resume for adds + if (isResume && !"add".equals(command)) { + handler.logError("Resume option only works with the --add command (run with -h flag for details)"); + throw new UnsupportedOperationException("Resume option only works with the --add command"); + } + + if (commandLine.hasOption('e')) { + eperson = commandLine.getOptionValue('e'); + } + + // check eperson identifier (email or id) + if (eperson == null) { + handler.logError("An eperson to do the importing must be specified (run with -h flag for details)"); + throw new UnsupportedOperationException("An eperson to do the importing must be specified"); + } + + File myFile = null; + try { + myFile = new File(mapfile); + } catch (Exception e) { + throw new UnsupportedOperationException("The mapfile " + mapfile + " does not exist"); + } + + if (!isResume && "add".equals(command) && myFile.exists()) { + handler.logError("The mapfile " + mapfile + " already exists. 
" + + "Either delete it or use --resume if attempting to resume an aborted import. " + + "(run with -h flag for details)"); + throw new UnsupportedOperationException("The mapfile " + mapfile + " already exists"); + } + + if (command == null) { + handler.logError("Must run with either add, replace, or remove (run with -h flag for details)"); + throw new UnsupportedOperationException("Must run with either add, replace, or remove"); + } else if ("add".equals(command) || "replace".equals(command)) { + if (sourcedir == null) { + handler.logError("A source directory containing items must be set (run with -h flag for details)"); + throw new UnsupportedOperationException("A source directory containing items must be set"); + } + + if (mapfile == null) { + handler.logError( + "A map file to hold importing results must be specified (run with -h flag for details)"); + throw new UnsupportedOperationException("A map file to hold importing results must be specified"); + } + } else if ("delete".equals(command)) { + if (mapfile == null) { + handler.logError("A map file must be specified (run with -h flag for details)"); + throw new UnsupportedOperationException("A map file must be specified"); + } + } + } + + @Override + protected void process(Context context, ItemImportService itemImportService, + List collections) throws Exception { + if ("add".equals(command)) { + itemImportService.addItems(context, collections, sourcedir, mapfile, template); + } else if ("replace".equals(command)) { + itemImportService.replaceItems(context, collections, sourcedir, mapfile, template); + } else if ("delete".equals(command)) { + itemImportService.deleteItems(context, mapfile); + } + } + + @Override + protected void readZip(Context context, ItemImportService itemImportService) throws Exception { + // If this is a zip archive, unzip it first + if (zip) { + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + + File.separator + context.getCurrentUser().getID()); + 
sourcedir = itemImportService.unzip( + new File(sourcedir + File.separator + zipfilename), workDir.getAbsolutePath()); + } + } + + @Override + protected void setMapFile() { + if (commandLine.hasOption('m')) { + mapfile = commandLine.getOptionValue('m'); + } + } + + @Override + protected void setZip() { + if (commandLine.hasOption('s')) { // source + sourcedir = commandLine.getOptionValue('s'); + } + + if (commandLine.hasOption('z')) { + zip = true; + zipfilename = commandLine.getOptionValue('z'); + } + } + + @Override + protected void setEPerson(Context context) throws SQLException { + EPerson myEPerson = null; + if (StringUtils.contains(eperson, '@')) { + // @ sign, must be an email + myEPerson = epersonService.findByEmail(context, eperson); + } else { + myEPerson = epersonService.find(context, UUID.fromString(eperson)); + } + + // check eperson + if (myEPerson == null) { + handler.logError("EPerson cannot be found: " + eperson + " (run with -h flag for details)"); + throw new UnsupportedOperationException("EPerson cannot be found: " + eperson); + } + + context.setCurrentUser(myEPerson); + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLIScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLIScriptConfiguration.java new file mode 100644 index 0000000000..d265cbf4a1 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLIScriptConfiguration.java @@ -0,0 +1,77 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.itemimport; + +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; +import org.dspace.scripts.configuration.ScriptConfiguration; + +/** + * The {@link ScriptConfiguration} for the {@link ItemImportCLI} script + * + * @author Francesco Pio 
Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemImportCLIScriptConfiguration extends ItemImportScriptConfiguration { + + @Override + public Options getOptions() { + Options options = new Options(); + + options.addOption(Option.builder("a").longOpt("add") + .desc("add items to DSpace") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("r").longOpt("replace") + .desc("replace items in mapfile") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("d").longOpt("delete") + .desc("delete items listed in mapfile") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("s").longOpt("source") + .desc("source of items (directory)") + .hasArg().required(false).build()); + options.addOption(Option.builder("z").longOpt("zip") + .desc("name of zip file") + .hasArg().required(false).build()); + options.addOption(Option.builder("c").longOpt("collection") + .desc("destination collection(s) Handle or database ID") + .hasArg().required(false).build()); + options.addOption(Option.builder("m").longOpt("mapfile") + .desc("mapfile items in mapfile") + .hasArg().required().build()); + options.addOption(Option.builder("e").longOpt("eperson") + .desc("email of eperson doing importing") + .hasArg().required().build()); + options.addOption(Option.builder("w").longOpt("workflow") + .desc("send submission through collection's workflow") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("n").longOpt("notify") + .desc("if sending submissions through the workflow, send notification emails") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("v").longOpt("validate") + .desc("test run - do not actually import items") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("x").longOpt("exclude-bitstreams") + .desc("do not load or expect content bitstreams") + .hasArg(false).required(false).build()); + 
options.addOption(Option.builder("p").longOpt("template") + .desc("apply template") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("R").longOpt("resume") + .desc("resume a failed import (add only)") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("q").longOpt("quiet") + .desc("don't display metadata") + .hasArg(false).required(false).build()); + + options.addOption(Option.builder("h").longOpt("help") + .desc("help") + .hasArg(false).required(false).build()); + + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLITool.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLITool.java deleted file mode 100644 index afee478f9c..0000000000 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLITool.java +++ /dev/null @@ -1,395 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.itemimport; - -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.UUID; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.DefaultParser; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Options; -import org.dspace.app.itemimport.factory.ItemImportServiceFactory; -import org.dspace.app.itemimport.service.ItemImportService; -import org.dspace.content.Collection; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.CollectionService; -import org.dspace.core.Constants; -import org.dspace.core.Context; -import org.dspace.eperson.EPerson; -import org.dspace.eperson.factory.EPersonServiceFactory; -import 
org.dspace.eperson.service.EPersonService; -import org.dspace.handle.factory.HandleServiceFactory; -import org.dspace.handle.service.HandleService; - -/** - * Import items into DSpace. The conventional use is upload files by copying - * them. DSpace writes the item's bitstreams into its assetstore. Metadata is - * also loaded to the DSpace database. - *

- * A second use assumes the bitstream files already exist in a storage - * resource accessible to DSpace. In this case the bitstreams are 'registered'. - * That is, the metadata is loaded to the DSpace database and DSpace is given - * the location of the file which is subsumed into DSpace. - *

- * The distinction is controlled by the format of lines in the 'contents' file. - * See comments in processContentsFile() below. - *

- * Modified by David Little, UCSD Libraries 12/21/04 to - * allow the registration of files (bitstreams) into DSpace. - */ -public class ItemImportCLITool { - - private static boolean template = false; - - private static final CollectionService collectionService = ContentServiceFactory.getInstance() - .getCollectionService(); - private static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService(); - private static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService(); - - /** - * Default constructor - */ - private ItemImportCLITool() { } - - public static void main(String[] argv) throws Exception { - Date startTime = new Date(); - int status = 0; - - try { - // create an options object and populate it - CommandLineParser parser = new DefaultParser(); - - Options options = new Options(); - - options.addOption("a", "add", false, "add items to DSpace"); - options.addOption("r", "replace", false, "replace items in mapfile"); - options.addOption("d", "delete", false, - "delete items listed in mapfile"); - options.addOption("s", "source", true, "source of items (directory)"); - options.addOption("z", "zip", true, "name of zip file"); - options.addOption("c", "collection", true, - "destination collection(s) Handle or database ID"); - options.addOption("m", "mapfile", true, "mapfile items in mapfile"); - options.addOption("e", "eperson", true, - "email of eperson doing importing"); - options.addOption("w", "workflow", false, - "send submission through collection's workflow"); - options.addOption("n", "notify", false, - "if sending submissions through the workflow, send notification emails"); - options.addOption("t", "test", false, - "test run - do not actually import items"); - options.addOption("p", "template", false, "apply template"); - options.addOption("R", "resume", false, - "resume a failed import (add only)"); - options.addOption("q", "quiet", false, "don't display metadata"); - - 
options.addOption("h", "help", false, "help"); - - CommandLine line = parser.parse(options, argv); - - String command = null; // add replace remove, etc - String sourcedir = null; - String mapfile = null; - String eperson = null; // db ID or email - String[] collections = null; // db ID or handles - boolean isTest = false; - boolean isResume = false; - boolean useWorkflow = false; - boolean useWorkflowSendEmail = false; - boolean isQuiet = false; - - if (line.hasOption('h')) { - HelpFormatter myhelp = new HelpFormatter(); - myhelp.printHelp("ItemImport\n", options); - System.out - .println("\nadding items: ItemImport -a -e eperson -c collection -s sourcedir -m mapfile"); - System.out - .println( - "\nadding items from zip file: ItemImport -a -e eperson -c collection -s sourcedir -z " + - "filename.zip -m mapfile"); - System.out - .println("replacing items: ItemImport -r -e eperson -c collection -s sourcedir -m mapfile"); - System.out - .println("deleting items: ItemImport -d -e eperson -m mapfile"); - System.out - .println( - "If multiple collections are specified, the first collection will be the one that owns the " + - "item."); - - System.exit(0); - } - - if (line.hasOption('a')) { - command = "add"; - } - - if (line.hasOption('r')) { - command = "replace"; - } - - if (line.hasOption('d')) { - command = "delete"; - } - - if (line.hasOption('w')) { - useWorkflow = true; - if (line.hasOption('n')) { - useWorkflowSendEmail = true; - } - } - - if (line.hasOption('t')) { - isTest = true; - System.out.println("**Test Run** - not actually importing items."); - } - - if (line.hasOption('p')) { - template = true; - } - - if (line.hasOption('s')) { // source - sourcedir = line.getOptionValue('s'); - } - - if (line.hasOption('m')) { // mapfile - mapfile = line.getOptionValue('m'); - } - - if (line.hasOption('e')) { // eperson - eperson = line.getOptionValue('e'); - } - - if (line.hasOption('c')) { // collections - collections = line.getOptionValues('c'); - } - - if 
(line.hasOption('R')) { - isResume = true; - System.out - .println("**Resume import** - attempting to import items not already imported"); - } - - if (line.hasOption('q')) { - isQuiet = true; - } - - boolean zip = false; - String zipfilename = ""; - if (line.hasOption('z')) { - zip = true; - zipfilename = line.getOptionValue('z'); - } - - //By default assume collections will be given on the command line - boolean commandLineCollections = true; - // now validate - // must have a command set - if (command == null) { - System.out - .println("Error - must run with either add, replace, or remove (run with -h flag for details)"); - System.exit(1); - } else if ("add".equals(command) || "replace".equals(command)) { - if (sourcedir == null) { - System.out - .println("Error - a source directory containing items must be set"); - System.out.println(" (run with -h flag for details)"); - System.exit(1); - } - - if (mapfile == null) { - System.out - .println("Error - a map file to hold importing results must be specified"); - System.out.println(" (run with -h flag for details)"); - System.exit(1); - } - - if (eperson == null) { - System.out - .println("Error - an eperson to do the importing must be specified"); - System.out.println(" (run with -h flag for details)"); - System.exit(1); - } - - if (collections == null) { - System.out.println("No collections given. 
Assuming 'collections' file inside item directory"); - commandLineCollections = false; - } - } else if ("delete".equals(command)) { - if (eperson == null) { - System.out - .println("Error - an eperson to do the importing must be specified"); - System.exit(1); - } - - if (mapfile == null) { - System.out.println("Error - a map file must be specified"); - System.exit(1); - } - } - - // can only resume for adds - if (isResume && !"add".equals(command)) { - System.out - .println("Error - resume option only works with the --add command"); - System.exit(1); - } - - // do checks around mapfile - if mapfile exists and 'add' is selected, - // resume must be chosen - File myFile = new File(mapfile); - - if (!isResume && "add".equals(command) && myFile.exists()) { - System.out.println("Error - the mapfile " + mapfile - + " already exists."); - System.out - .println("Either delete it or use --resume if attempting to resume an aborted import."); - System.exit(1); - } - - ItemImportService myloader = ItemImportServiceFactory.getInstance().getItemImportService(); - myloader.setTest(isTest); - myloader.setResume(isResume); - myloader.setUseWorkflow(useWorkflow); - myloader.setUseWorkflowSendEmail(useWorkflowSendEmail); - myloader.setQuiet(isQuiet); - - // create a context - Context c = new Context(Context.Mode.BATCH_EDIT); - - // find the EPerson, assign to context - EPerson myEPerson = null; - - if (eperson.indexOf('@') != -1) { - // @ sign, must be an email - myEPerson = epersonService.findByEmail(c, eperson); - } else { - myEPerson = epersonService.find(c, UUID.fromString(eperson)); - } - - if (myEPerson == null) { - System.out.println("Error, eperson cannot be found: " + eperson); - System.exit(1); - } - - c.setCurrentUser(myEPerson); - - // find collections - List mycollections = null; - - // don't need to validate collections set if command is "delete" - // also if no collections are given in the command line - if (!"delete".equals(command) && commandLineCollections) { - 
System.out.println("Destination collections:"); - - mycollections = new ArrayList<>(); - - // validate each collection arg to see if it's a real collection - for (int i = 0; i < collections.length; i++) { - - Collection resolved = null; - - if (collections[i] != null) { - - // is the ID a handle? - if (collections[i].indexOf('/') != -1) { - // string has a / so it must be a handle - try and resolve - // it - resolved = ((Collection) handleService - .resolveToObject(c, collections[i])); - - } else { - // not a handle, try and treat it as an integer collection database ID - resolved = collectionService.find(c, UUID.fromString(collections[i])); - - } - - } - - // was the collection valid? - if ((resolved == null) - || (resolved.getType() != Constants.COLLECTION)) { - throw new IllegalArgumentException("Cannot resolve " - + collections[i] + " to collection"); - } - - // add resolved collection to list - mycollections.add(resolved); - - // print progress info - String owningPrefix = ""; - - if (i == 0) { - owningPrefix = "Owning "; - } - - System.out.println(owningPrefix + " Collection: " - + resolved.getName()); - } - } // end of validating collections - - try { - // If this is a zip archive, unzip it first - if (zip) { - sourcedir = myloader.unzip(sourcedir, zipfilename); - } - - - c.turnOffAuthorisationSystem(); - - if ("add".equals(command)) { - myloader.addItems(c, mycollections, sourcedir, mapfile, template); - } else if ("replace".equals(command)) { - myloader.replaceItems(c, mycollections, sourcedir, mapfile, template); - } else if ("delete".equals(command)) { - myloader.deleteItems(c, mapfile); - } - - // complete all transactions - c.complete(); - } catch (Exception e) { - c.abort(); - e.printStackTrace(); - System.out.println(e); - status = 1; - } - - // Delete the unzipped file - try { - if (zip) { - System.gc(); - System.out.println( - "Deleting temporary zip directory: " + myloader.getTempWorkDirFile().getAbsolutePath()); - myloader.cleanupZipTemp(); - } - 
} catch (IOException ex) { - System.out.println("Unable to delete temporary zip archive location: " + myloader.getTempWorkDirFile() - .getAbsolutePath()); - } - - - if (isTest) { - System.out.println("***End of Test Run***"); - } - } finally { - Date endTime = new Date(); - System.out.println("Started: " + startTime.getTime()); - System.out.println("Ended: " + endTime.getTime()); - System.out.println( - "Elapsed time: " + ((endTime.getTime() - startTime.getTime()) / 1000) + " secs (" + (endTime - .getTime() - startTime.getTime()) + " msecs)"); - } - - System.exit(status); - } -} diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java new file mode 100644 index 0000000000..a3149040c4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java @@ -0,0 +1,103 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.itemimport; + +import java.io.InputStream; +import java.sql.SQLException; + +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.core.Context; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * The {@link ScriptConfiguration} for the {@link ItemImport} script + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemImportScriptConfiguration extends ScriptConfiguration { + + @Autowired + private AuthorizeService authorizeService; + + private Class dspaceRunnableClass; + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + 
@Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public boolean isAllowedToExecute(final Context context) { + try { + return authorizeService.isAdmin(context); + } catch (SQLException e) { + throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + } + } + + @Override + public Options getOptions() { + Options options = new Options(); + + options.addOption(Option.builder("a").longOpt("add") + .desc("add items to DSpace") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("r").longOpt("replace") + .desc("replace items in mapfile") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("d").longOpt("delete") + .desc("delete items listed in mapfile") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("z").longOpt("zip") + .desc("name of zip file") + .type(InputStream.class) + .hasArg().required().build()); + options.addOption(Option.builder("c").longOpt("collection") + .desc("destination collection(s) Handle or database ID") + .hasArg().required(false).build()); + options.addOption(Option.builder("m").longOpt("mapfile") + .desc("mapfile items in mapfile") + .type(InputStream.class) + .hasArg().required(false).build()); + options.addOption(Option.builder("w").longOpt("workflow") + .desc("send submission through collection's workflow") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("n").longOpt("notify") + .desc("if sending submissions through the workflow, send notification emails") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("v").longOpt("validate") + .desc("test run - do not actually import items") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("x").longOpt("exclude-bitstreams") + .desc("do not load or expect content bitstreams") + 
.hasArg(false).required(false).build()); + options.addOption(Option.builder("p").longOpt("template") + .desc("apply template") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("R").longOpt("resume") + .desc("resume a failed import (add only)") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("q").longOpt("quiet") + .desc("don't display metadata") + .hasArg(false).required(false).build()); + + options.addOption(Option.builder("h").longOpt("help") + .desc("help") + .hasArg(false).required(false).build()); + + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java index 6a6a70d574..076cc8ebe2 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java @@ -51,6 +51,10 @@ import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; import org.apache.commons.collections4.ComparatorUtils; import org.apache.commons.io.FileDeleteStrategy; @@ -58,8 +62,8 @@ import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.RandomStringUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.xpath.XPathAPI; import org.dspace.app.itemimport.service.ItemImportService; import org.dspace.app.util.LocalSchemaFilenameFilter; import org.dspace.app.util.RelationshipUtils; @@ -102,6 +106,7 @@ import org.dspace.eperson.Group; import 
org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.GroupService; import org.dspace.handle.service.HandleService; +import org.dspace.scripts.handler.DSpaceRunnableHandler; import org.dspace.services.ConfigurationService; import org.dspace.workflow.WorkflowItem; import org.dspace.workflow.WorkflowService; @@ -131,7 +136,9 @@ import org.xml.sax.SAXException; * allow the registration of files (bitstreams) into DSpace. */ public class ItemImportServiceImpl implements ItemImportService, InitializingBean { - private final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemImportServiceImpl.class); + private final Logger log = LogManager.getLogger(); + + private DSpaceRunnableHandler handler; @Autowired(required = true) protected AuthorizeService authorizeService; @@ -175,6 +182,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea protected String tempWorkDir; protected boolean isTest = false; + protected boolean isExcludeContent = false; protected boolean isResume = false; protected boolean useWorkflow = false; protected boolean useWorkflowSendEmail = false; @@ -191,11 +199,13 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea if (!tempWorkDirFile.exists()) { boolean success = tempWorkDirFile.mkdir(); if (success) { - log.info("Created org.dspace.app.batchitemimport.work.dir of: " + tempWorkDir); + logInfo("Created org.dspace.app.batchitemimport.work.dir of: " + tempWorkDir); } else { - log.error("Cannot create batch import directory! " + tempWorkDir); + logError("Cannot create batch import directory! 
" + tempWorkDir); } } + // clean work dir path from duplicate separators + tempWorkDir = StringUtils.replace(tempWorkDir, File.separator + File.separator, File.separator); } // File listing filter to look for metadata files @@ -221,9 +231,9 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea try { addItems(c, mycollections, sourceDir, mapFile, template); } catch (Exception addException) { - log.error("AddItems encountered an error, will try to revert. Error: " + addException.getMessage()); + logError("AddItems encountered an error, will try to revert. Error: " + addException.getMessage()); deleteItems(c, mapFile); - log.info("Attempted to delete partial (errored) import"); + logInfo("Attempted to delete partial (errored) import"); throw addException; } } @@ -241,10 +251,8 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea itemFolderMap = new HashMap<>(); - System.out.println("Adding items from directory: " + sourceDir); - log.debug("Adding items from directory: " + sourceDir); - System.out.println("Generating mapfile: " + mapFile); - log.debug("Generating mapfile: " + mapFile); + logDebug("Adding items from directory: " + sourceDir); + logDebug("Generating mapfile: " + mapFile); boolean directoryFileCollections = false; if (mycollections == null) { @@ -261,16 +269,12 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea // sneaky isResume == true means open file in append mode outFile = new File(mapFile); mapOut = new PrintWriter(new FileWriter(outFile, isResume)); - - if (mapOut == null) { - throw new Exception("can't open mapfile: " + mapFile); - } } // open and process the source directory File d = new java.io.File(sourceDir); - if (d == null || !d.isDirectory()) { + if (!d.isDirectory()) { throw new Exception("Error, cannot open source directory " + sourceDir); } @@ -280,7 +284,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea for 
(int i = 0; i < dircontents.length; i++) { if (skipItems.containsKey(dircontents[i])) { - System.out.println("Skipping import of " + dircontents[i]); + logInfo("Skipping import of " + dircontents[i]); //we still need the item in the map for relationship linking String skippedHandle = skipItems.get(dircontents[i]); @@ -294,13 +298,12 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea try { List cols = processCollectionFile(c, path, "collections"); if (cols == null) { - System.out - .println("No collections specified for item " + dircontents[i] + ". Skipping."); + logError("No collections specified for item " + dircontents[i] + ". Skipping."); continue; } clist = cols; } catch (IllegalArgumentException e) { - System.out.println(e.getMessage() + " Skipping."); + logError(e.getMessage() + " Skipping."); continue; } } else { @@ -312,7 +315,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea itemFolderMap.put(dircontents[i], item); c.uncacheEntity(item); - System.out.println(i + " " + dircontents[i]); + logInfo(i + " " + dircontents[i]); } } @@ -354,7 +357,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea for (String itemIdentifier : identifierList) { if (isTest) { - System.out.println("\tAdding relationship (type: " + relationshipType + + logInfo("\tAdding relationship (type: " + relationshipType + ") from " + folderName + " to " + itemIdentifier); continue; } @@ -365,50 +368,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea throw new Exception("Could not find item for " + itemIdentifier); } - //get entity type of entity and item - String itemEntityType = getEntityType(item); - String relatedEntityType = getEntityType(relationItem); - - //find matching relationship type - List relTypes = relationshipTypeService.findByLeftwardOrRightwardTypeName( - c, relationshipType); - RelationshipType foundRelationshipType = 
RelationshipUtils.matchRelationshipType( - relTypes, relatedEntityType, itemEntityType, relationshipType); - - if (foundRelationshipType == null) { - throw new Exception("No Relationship type found for:\n" + - "Target type: " + relatedEntityType + "\n" + - "Origin referer type: " + itemEntityType + "\n" + - "with typeName: " + relationshipType - ); - } - - boolean left = false; - if (foundRelationshipType.getLeftwardType().equalsIgnoreCase(relationshipType)) { - left = true; - } - - // Placeholder items for relation placing - Item leftItem = null; - Item rightItem = null; - if (left) { - leftItem = item; - rightItem = relationItem; - } else { - leftItem = relationItem; - rightItem = item; - } - - // Create the relationship - int leftPlace = relationshipService.findNextLeftPlaceByLeftItem(c, leftItem); - int rightPlace = relationshipService.findNextRightPlaceByRightItem(c, rightItem); - Relationship persistedRelationship = relationshipService.create( - c, leftItem, rightItem, foundRelationshipType, leftPlace, rightPlace); - // relationshipService.update(c, persistedRelationship); - - System.out.println("\tAdded relationship (type: " + relationshipType + ") from " + - leftItem.getHandle() + " to " + rightItem.getHandle()); - + addRelationship(c, item, relationItem, relationshipType); } } @@ -419,25 +379,84 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea } + /** + * Add relationship. 
+ * @param c the context + * @param item the item + * @param relationItem the related item + * @param relationshipType the relation type name + * @throws SQLException + * @throws AuthorizeException + */ + protected void addRelationship(Context c, Item item, Item relationItem, String relationshipType) + throws SQLException, AuthorizeException { + // get entity type of entity and item + String itemEntityType = getEntityType(item); + String relatedEntityType = getEntityType(relationItem); + + // find matching relationship type + List relTypes = relationshipTypeService.findByLeftwardOrRightwardTypeName( + c, relationshipType); + RelationshipType foundRelationshipType = RelationshipUtils.matchRelationshipType( + relTypes, relatedEntityType, itemEntityType, relationshipType); + + if (foundRelationshipType == null) { + throw new IllegalArgumentException("No Relationship type found for:\n" + + "Target type: " + relatedEntityType + "\n" + + "Origin referer type: " + itemEntityType + "\n" + + "with typeName: " + relationshipType + ); + } + + boolean left = false; + if (foundRelationshipType.getLeftwardType().equalsIgnoreCase(relationshipType)) { + left = true; + } + + // placeholder items for relation placing + Item leftItem = null; + Item rightItem = null; + if (left) { + leftItem = item; + rightItem = relationItem; + } else { + leftItem = relationItem; + rightItem = item; + } + + // Create the relationship, appending to the end + Relationship persistedRelationship = relationshipService.create( + c, leftItem, rightItem, foundRelationshipType, -1, -1 + ); + relationshipService.update(c, persistedRelationship); + + logInfo("\tAdded relationship (type: " + relationshipType + ") from " + + leftItem.getHandle() + " to " + rightItem.getHandle()); + } + /** * Get the item's entity type from meta. 
* * @param item * @return */ - protected String getEntityType(Item item) throws Exception { + protected String getEntityType(Item item) { return itemService.getMetadata(item, "dspace", "entity", "type", Item.ANY).get(0).getValue(); } /** * Read the relationship manifest file. * - * Each line in the file contains a relationship type id and an item identifier in the following format: - * - * relation. - * - * The input_item_folder should refer the folder name of another item in this import batch. - * + * Each line in the file contains a relationship type id and an item + * identifier in the following format: + * + *

+ * {@code relation. } + * + *

+ * The {@code input_item_folder} should refer the folder name of another + * item in this import batch. + * * @param path The main import folder path. * @param filename The name of the manifest file to check ('relationships') * @return Map of found relationships @@ -450,7 +469,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea if (file.exists()) { - System.out.println("\tProcessing relationships file: " + filename); + logInfo("\tProcessing relationships file: " + filename); BufferedReader br = null; try { @@ -491,13 +510,13 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea } } catch (FileNotFoundException e) { - System.out.println("\tNo relationships file found."); + logWarn("\tNo relationships file found."); } finally { if (br != null) { try { br.close(); } catch (IOException e) { - System.out.println("Non-critical problem releasing resources."); + logError("Non-critical problem releasing resources."); } } } @@ -541,25 +560,41 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea } - } else if (itemIdentifier.indexOf('/') != -1) { - //resolve by handle - return (Item) handleService.resolveToObject(c, itemIdentifier); - - } else { - //try to resolve by UUID - return itemService.findByIdOrLegacyId(c, itemIdentifier); } - return null; + // resolve item by handle or UUID + return resolveItem(c, itemIdentifier); } + /** + * Resolve an item identifier. + * + * @param c Context + * @param itemIdentifier The identifier string found in the import file (handle or UUID) + * @return Item if found, or null. 
+ * @throws SQLException + * @throws IllegalStateException + * @throws Exception + */ + protected Item resolveItem(Context c, String itemIdentifier) + throws IllegalStateException, SQLException { + if (itemIdentifier.indexOf('/') != -1) { + // resolve by handle + return (Item) handleService.resolveToObject(c, itemIdentifier); + } + + // resolve by UUID + return itemService.findByIdOrLegacyId(c, itemIdentifier); + } + /** * Lookup an item by a (unique) meta value. * - * @param metaKey - * @param metaValue - * @return Item + * @param c current DSpace session. + * @param metaKey name of the metadata field to match. + * @param metaValue value to be matched. + * @return the matching Item. * @throws Exception if single item not found. */ protected Item findItemByMetaValue(Context c, String metaKey, String metaValue) throws Exception { @@ -603,7 +638,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea // verify the source directory File d = new java.io.File(sourceDir); - if (d == null || !d.isDirectory()) { + if (!d.isDirectory()) { throw new Exception("Error, cannot open source directory " + sourceDir); } @@ -621,7 +656,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea Item oldItem = null; if (oldHandle.indexOf('/') != -1) { - System.out.println("\tReplacing: " + oldHandle); + logInfo("\tReplacing: " + oldHandle); // add new item, locate old one oldItem = (Item) handleService.resolveToObject(c, oldHandle); @@ -642,10 +677,6 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea File handleFile = new File(sourceDir + File.separatorChar + newItemName + File.separatorChar + "handle"); PrintWriter handleOut = new PrintWriter(new FileWriter(handleFile, true)); - if (handleOut == null) { - throw new Exception("can't open handle file: " + handleFile.getCanonicalPath()); - } - handleOut.println(oldHandle); handleOut.close(); @@ -658,7 +689,7 @@ public class ItemImportServiceImpl 
implements ItemImportService, InitializingBea @Override public void deleteItems(Context c, String mapFile) throws Exception { - System.out.println("Deleting items listed in mapfile: " + mapFile); + logInfo("Deleting items listed in mapfile: " + mapFile); // read in the mapfile Map myhash = readMapFile(mapFile); @@ -671,12 +702,12 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea if (itemID.indexOf('/') != -1) { String myhandle = itemID; - System.out.println("Deleting item " + myhandle); + logInfo("Deleting item " + myhandle); deleteItem(c, myhandle); } else { // it's an ID Item myitem = itemService.findByIdOrLegacyId(c, itemID); - System.out.println("Deleting item " + itemID); + logInfo("Deleting item " + itemID); deleteItem(c, myitem); c.uncacheEntity(myitem); } @@ -699,8 +730,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea String itemname, PrintWriter mapOut, boolean template) throws Exception { String mapOutputString = null; - System.out.println("Adding item from directory " + itemname); - log.debug("adding item from directory " + itemname); + logDebug("adding item from directory " + itemname); // create workspace item Item myitem = null; @@ -747,7 +777,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea installItemService.installItem(c, wi, myhandle); } catch (Exception e) { workspaceItemService.deleteAll(c, wi); - log.error("Exception after install item, try to revert...", e); + logError("Exception after install item, try to revert...", e); throw e; } @@ -759,7 +789,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea // set permissions if specified in contents file if (options.size() > 0) { - System.out.println("Processing options"); + logInfo("Processing options"); processOptions(c, myitem, options); } } @@ -810,7 +840,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea Item myitem = 
(Item) handleService.resolveToObject(c, myhandle); if (myitem == null) { - System.out.println("Error - cannot locate item - already deleted?"); + logError("Error - cannot locate item - already deleted?"); } else { deleteItem(c, myitem); c.uncacheEntity(myitem); @@ -863,7 +893,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea // Load all metadata schemas into the item. protected void loadMetadata(Context c, Item myitem, String path) throws SQLException, IOException, ParserConfigurationException, - SAXException, TransformerException, AuthorizeException { + SAXException, TransformerException, AuthorizeException, XPathExpressionException { // Load the dublin core metadata loadDublinCore(c, myitem, path + "dublin_core.xml"); @@ -877,14 +907,15 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea protected void loadDublinCore(Context c, Item myitem, String filename) throws SQLException, IOException, ParserConfigurationException, - SAXException, TransformerException, AuthorizeException { + SAXException, TransformerException, AuthorizeException, XPathExpressionException { Document document = loadXML(filename); // Get the schema, for backward compatibility we will default to the // dublin core schema if the schema name is not available in the import // file String schema; - NodeList metadata = XPathAPI.selectNodeList(document, "/dublin_core"); + XPath xPath = XPathFactory.newInstance().newXPath(); + NodeList metadata = (NodeList) xPath.compile("/dublin_core").evaluate(document, XPathConstants.NODESET); Node schemaAttr = metadata.item(0).getAttributes().getNamedItem( "schema"); if (schemaAttr == null) { @@ -894,11 +925,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea } // Get the nodes corresponding to formats - NodeList dcNodes = XPathAPI.selectNodeList(document, - "/dublin_core/dcvalue"); + NodeList dcNodes = (NodeList) 
xPath.compile("/dublin_core/dcvalue").evaluate(document, XPathConstants.NODESET); if (!isQuiet) { - System.out.println("\tLoading dublin core from " + filename); + logInfo("\tLoading dublin core from " + filename); } // Add each one as a new format to the registry @@ -928,7 +958,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea } if (!isQuiet) { - System.out.println("\tSchema: " + schema + " Element: " + element + " Qualifier: " + qualifier + logInfo("\tSchema: " + schema + " Element: " + element + " Qualifier: " + qualifier + " Value: " + value); } @@ -937,20 +967,28 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea } // only add metadata if it is no test and there is an actual value if (!isTest && !value.equals("")) { - itemService.addMetadata(c, i, schema, element, qualifier, language, value); + if (StringUtils.equals(schema, MetadataSchemaEnum.RELATION.getName())) { + Item relationItem = resolveItem(c, value); + if (relationItem == null) { + throw new IllegalArgumentException("No item found with id=" + value); + } + addRelationship(c, i, relationItem, element); + } else { + itemService.addMetadata(c, i, schema, element, qualifier, language, value); + } } else { // If we're just test the import, let's check that the actual metadata field exists. MetadataSchema foundSchema = metadataSchemaService.find(c, schema); if (foundSchema == null) { - System.out.println("ERROR: schema '" + schema + "' was not found in the registry."); + logError("ERROR: schema '" + schema + "' was not found in the registry."); return; } MetadataField foundField = metadataFieldService.findByElement(c, foundSchema, element, qualifier); if (foundField == null) { - System.out.println( + logError( "ERROR: Metadata field: '" + schema + "." + element + "." 
+ qualifier + "' was not found in the " + "registry."); return; @@ -977,7 +1015,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea File file = new File(path + File.separatorChar + filename); ArrayList collections = new ArrayList<>(); List result = null; - System.out.println("Processing collections file: " + filename); + logInfo("Processing collections file: " + filename); if (file.exists()) { BufferedReader br = null; @@ -1004,13 +1042,13 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea result = collections; } catch (FileNotFoundException e) { - System.out.println("No collections file found."); + logWarn("No collections file found."); } finally { if (br != null) { try { br.close(); } catch (IOException e) { - System.out.println("Non-critical problem releasing resources."); + logError("Non-critical problem releasing resources."); } } } @@ -1032,7 +1070,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea File file = new File(path + File.separatorChar + filename); String result = null; - System.out.println("Processing handle file: " + filename); + logInfo("Processing handle file: " + filename); if (file.exists()) { BufferedReader is = null; try { @@ -1041,14 +1079,14 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea // result gets contents of file, or null result = is.readLine(); - System.out.println("read handle: '" + result + "'"); + logInfo("read handle: '" + result + "'"); } catch (FileNotFoundException e) { // probably no handle file, just return null - System.out.println("It appears there is no handle file -- generating one"); + logWarn("It appears there is no handle file -- generating one"); } catch (IOException e) { // probably no handle file, just return null - System.out.println("It appears there is no handle file -- generating one"); + logWarn("It appears there is no handle file -- generating one"); } finally { if (is != 
null) { try { @@ -1060,7 +1098,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea } } else { // probably no handle file, just return null - System.out.println("It appears there is no handle file -- generating one"); + logWarn("It appears there is no handle file -- generating one"); } return result; @@ -1087,7 +1125,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea String line = ""; List options = new ArrayList<>(); - System.out.println("\tProcessing contents file: " + contentsFile); + logInfo("\tProcessing contents file: " + contentsFile); if (contentsFile.exists()) { BufferedReader is = null; @@ -1134,8 +1172,8 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea } } // while if (iAssetstore == -1 || sFilePath == null) { - System.out.println("\tERROR: invalid contents file line"); - System.out.println("\t\tSkipping line: " + logError("\tERROR: invalid contents file line"); + logInfo("\t\tSkipping line: " + sRegistrationLine); continue; } @@ -1159,7 +1197,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea } registerBitstream(c, i, iAssetstore, sFilePath, sBundle, sDescription); - System.out.println("\tRegistering Bitstream: " + sFilePath + logInfo("\tRegistering Bitstream: " + sFilePath + "\tAssetstore: " + iAssetstore + "\tBundle: " + sBundle + "\tDescription: " + sDescription); @@ -1171,7 +1209,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea if (bitstreamEndIndex == -1) { // no extra info processContentFileEntry(c, i, path, line, null, false); - System.out.println("\tBitstream: " + line); + logInfo("\tBitstream: " + line); } else { String bitstreamName = line.substring(0, bitstreamEndIndex); @@ -1283,17 +1321,17 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea + bundleMarker.length(), bEndIndex).trim(); processContentFileEntry(c, i, path, bitstreamName, 
bundleName, primary); - System.out.println("\tBitstream: " + bitstreamName + + logInfo("\tBitstream: " + bitstreamName + "\tBundle: " + bundleName + primaryStr); } else { processContentFileEntry(c, i, path, bitstreamName, null, primary); - System.out.println("\tBitstream: " + bitstreamName + primaryStr); + logInfo("\tBitstream: " + bitstreamName + primaryStr); } if (permissionsExist || descriptionExists || labelExists || heightExists || widthExists || tocExists) { - System.out.println("Gathering options."); + logInfo("Gathering options."); String extraInfo = bitstreamName; if (permissionsExist) { @@ -1340,12 +1378,12 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea String[] dirListing = dir.list(); for (String fileName : dirListing) { if (!"dublin_core.xml".equals(fileName) && !fileName.equals("handle") && !metadataFileFilter - .accept(dir, fileName)) { + .accept(dir, fileName) && !"collections".equals(fileName) && !"relationships".equals(fileName)) { throw new FileNotFoundException("No contents file found"); } } - System.out.println("No contents file found - but only metadata files found. Assuming metadata only."); + logInfo("No contents file found - but only metadata files found. 
Assuming metadata only."); } return options; @@ -1367,6 +1405,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea protected void processContentFileEntry(Context c, Item i, String path, String fileName, String bundleName, boolean primary) throws SQLException, IOException, AuthorizeException { + if (isExcludeContent) { + return; + } + String fullpath = path + File.separatorChar + fileName; // get an input stream @@ -1507,9 +1549,9 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea */ protected void processOptions(Context c, Item myItem, List options) throws SQLException, AuthorizeException { - System.out.println("Processing options."); + logInfo("Processing options."); for (String line : options) { - System.out.println("\tprocessing " + line); + logInfo("\tprocessing " + line); boolean permissionsExist = false; boolean descriptionExists = false; @@ -1626,7 +1668,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea try { myGroup = groupService.findByName(c, groupName); } catch (SQLException sqle) { - System.out.println("SQL Exception finding group name: " + logError("SQL Exception finding group name: " + groupName); // do nothing, will check for null group later } @@ -1667,42 +1709,41 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea .trim(); } + if (isTest) { + continue; + } + Bitstream bs = null; - boolean notfound = true; boolean updateRequired = false; - if (!isTest) { - // find bitstream - List bitstreams = itemService.getNonInternalBitstreams(c, myItem); - for (int j = 0; j < bitstreams.size() && notfound; j++) { - if (bitstreams.get(j).getName().equals(bitstreamName)) { - bs = bitstreams.get(j); - notfound = false; - } + // find bitstream + List bitstreams = itemService.getNonInternalBitstreams(c, myItem); + for (Bitstream bitstream : bitstreams) { + if (bitstream.getName().equals(bitstreamName)) { + bs = bitstream; + break; } } 
- if (notfound && !isTest) { + if (null == bs) { // this should never happen - System.out.println("\tdefault permissions set for " - + bitstreamName); - } else if (!isTest) { + logInfo("\tdefault permissions set for " + bitstreamName); + } else { if (permissionsExist) { if (myGroup == null) { - System.out.println("\t" + groupName + logInfo("\t" + groupName + " not found, permissions set to default"); } else if (actionID == -1) { - System.out - .println("\tinvalid permissions flag, permissions set to default"); + logInfo("\tinvalid permissions flag, permissions set to default"); } else { - System.out.println("\tSetting special permissions for " + logInfo("\tSetting special permissions for " + bitstreamName); setPermission(c, myGroup, actionID, bs); } } if (descriptionExists) { - System.out.println("\tSetting description for " + logInfo("\tSetting description for " + bitstreamName); bs.setDescription(c, thisDescription); updateRequired = true; @@ -1711,7 +1752,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea if (labelExists) { MetadataField metadataField = metadataFieldService .findByElement(c, METADATA_IIIF_SCHEMA, METADATA_IIIF_LABEL_ELEMENT, null); - System.out.println("\tSetting label to " + thisLabel + " in element " + logInfo("\tSetting label to " + thisLabel + " in element " + metadataField.getElement() + " on " + bitstreamName); bitstreamService.addMetadata(c, bs, metadataField, null, thisLabel); updateRequired = true; @@ -1721,7 +1762,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea MetadataField metadataField = metadataFieldService .findByElement(c, METADATA_IIIF_SCHEMA, METADATA_IIIF_IMAGE_ELEMENT, METADATA_IIIF_HEIGHT_QUALIFIER); - System.out.println("\tSetting height to " + thisHeight + " in element " + logInfo("\tSetting height to " + thisHeight + " in element " + metadataField.getElement() + " on " + bitstreamName); bitstreamService.addMetadata(c, bs, metadataField, null, 
thisHeight); updateRequired = true; @@ -1730,7 +1771,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea MetadataField metadataField = metadataFieldService .findByElement(c, METADATA_IIIF_SCHEMA, METADATA_IIIF_IMAGE_ELEMENT, METADATA_IIIF_WIDTH_QUALIFIER); - System.out.println("\tSetting width to " + thisWidth + " in element " + logInfo("\tSetting width to " + thisWidth + " in element " + metadataField.getElement() + " on " + bitstreamName); bitstreamService.addMetadata(c, bs, metadataField, null, thisWidth); updateRequired = true; @@ -1738,7 +1779,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea if (tocExists) { MetadataField metadataField = metadataFieldService .findByElement(c, METADATA_IIIF_SCHEMA, METADATA_IIIF_TOC_ELEMENT, null); - System.out.println("\tSetting toc to " + thisToc + " in element " + logInfo("\tSetting toc to " + thisToc + " in element " + metadataField.getElement() + " on " + bitstreamName); bitstreamService.addMetadata(c, bs, metadataField, null, thisToc); updateRequired = true; @@ -1777,9 +1818,9 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea resourcePolicyService.update(c, rp); } else { if (actionID == Constants.READ) { - System.out.println("\t\tpermissions: READ for " + g.getName()); + logInfo("\t\tpermissions: READ for " + g.getName()); } else if (actionID == Constants.WRITE) { - System.out.println("\t\tpermissions: WRITE for " + g.getName()); + logInfo("\t\tpermissions: WRITE for " + g.getName()); } } @@ -1860,7 +1901,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea deleteDirectory(files[i]); } else { if (!files[i].delete()) { - log.error("Unable to delete file: " + files[i].getName()); + logError("Unable to delete file: " + files[i].getName()); } } } @@ -1880,7 +1921,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea // 2 // does the zip file exist and can we 
write to the temp directory if (!zipfile.canRead()) { - log.error("Zip file '" + zipfile.getAbsolutePath() + "' does not exist, or is not readable."); + logError("Zip file '" + zipfile.getAbsolutePath() + "' does not exist, or is not readable."); } String destinationDir = destDir; @@ -1890,13 +1931,13 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea File tempdir = new File(destinationDir); if (!tempdir.isDirectory()) { - log.error("'" + configurationService.getProperty("org.dspace.app.itemexport.work.dir") + - "' as defined by the key 'org.dspace.app.itemexport.work.dir' in dspace.cfg " + + logError("'" + configurationService.getProperty("org.dspace.app.batchitemimport.work.dir") + + "' as defined by the key 'org.dspace.app.batchitemimport.work.dir' in dspace.cfg " + "is not a valid directory"); } if (!tempdir.exists() && !tempdir.mkdirs()) { - log.error("Unable to create temporary directory: " + tempdir.getAbsolutePath()); + logError("Unable to create temporary directory: " + tempdir.getAbsolutePath()); } String sourcedir = destinationDir + System.getProperty("file.separator") + zipfile.getName(); String zipDir = destinationDir + System.getProperty("file.separator") + zipfile.getName() + System @@ -1908,71 +1949,71 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea ZipFile zf = new ZipFile(zipfile); ZipEntry entry; Enumeration entries = zf.entries(); - while (entries.hasMoreElements()) { - entry = entries.nextElement(); - if (entry.isDirectory()) { - if (!new File(zipDir + entry.getName()).mkdirs()) { - log.error("Unable to create contents directory: " + zipDir + entry.getName()); - } - } else { - String entryName = entry.getName(); - File outFile = new File(zipDir + entryName); - // Verify that this file will be extracted into our zipDir (and not somewhere else!) 
- if (!outFile.toPath().normalize().startsWith(zipDir)) { - throw new IOException("Bad zip entry: '" + entryName - + "' in file '" + zipfile.getAbsolutePath() + "'!" - + " Cannot process this file."); - } else { - System.out.println("Extracting file: " + entryName); - log.info("Extracting file: " + entryName); - - int index = entryName.lastIndexOf('/'); - if (index == -1) { - // Was it created on Windows instead? - index = entryName.lastIndexOf('\\'); + try { + while (entries.hasMoreElements()) { + entry = entries.nextElement(); + if (entry.isDirectory()) { + if (!new File(zipDir + entry.getName()).mkdirs()) { + logError("Unable to create contents directory: " + zipDir + entry.getName()); } - if (index > 0) { - File dir = new File(zipDir + entryName.substring(0, index)); - if (!dir.exists() && !dir.mkdirs()) { - log.error("Unable to create directory: " + dir.getAbsolutePath()); + } else { + String entryName = entry.getName(); + File outFile = new File(zipDir + entryName); + // Verify that this file will be extracted into our zipDir (and not somewhere else!) + if (!outFile.toPath().normalize().startsWith(zipDir)) { + throw new IOException("Bad zip entry: '" + entryName + + "' in file '" + zipfile.getAbsolutePath() + "'!" + + " Cannot process this file."); + } else { + logInfo("Extracting file: " + entryName); + + int index = entryName.lastIndexOf('/'); + if (index == -1) { + // Was it created on Windows instead? + index = entryName.lastIndexOf('\\'); } + if (index > 0) { + File dir = new File(zipDir + entryName.substring(0, index)); + if (!dir.exists() && !dir.mkdirs()) { + logError("Unable to create directory: " + dir.getAbsolutePath()); + } - //Entries could have too many directories, and we need to adjust the sourcedir - // file1.zip (SimpleArchiveFormat / item1 / contents|dublin_core|... - // SimpleArchiveFormat / item2 / contents|dublin_core|... - // or - // file2.zip (item1 / contents|dublin_core|... - // item2 / contents|dublin_core|... 
+ //Entries could have too many directories, and we need to adjust the sourcedir + // file1.zip (SimpleArchiveFormat / item1 / contents|dublin_core|... + // SimpleArchiveFormat / item2 / contents|dublin_core|... + // or + // file2.zip (item1 / contents|dublin_core|... + // item2 / contents|dublin_core|... - //regex supports either windows or *nix file paths - String[] entryChunks = entryName.split("/|\\\\"); - if (entryChunks.length > 2) { - if (StringUtils.equals(sourceDirForZip, sourcedir)) { - sourceDirForZip = sourcedir + "/" + entryChunks[0]; + //regex supports either windows or *nix file paths + String[] entryChunks = entryName.split("/|\\\\"); + if (entryChunks.length > 2) { + if (StringUtils.equals(sourceDirForZip, sourcedir)) { + sourceDirForZip = sourcedir + "/" + entryChunks[0]; + } } } + byte[] buffer = new byte[1024]; + int len; + InputStream in = zf.getInputStream(entry); + BufferedOutputStream out = new BufferedOutputStream( + new FileOutputStream(outFile)); + while ((len = in.read(buffer)) >= 0) { + out.write(buffer, 0, len); + } + in.close(); + out.close(); } - byte[] buffer = new byte[1024]; - int len; - InputStream in = zf.getInputStream(entry); - BufferedOutputStream out = new BufferedOutputStream( - new FileOutputStream(outFile)); - while ((len = in.read(buffer)) >= 0) { - out.write(buffer, 0, len); - } - in.close(); - out.close(); } } + } finally { + //Close zip file + zf.close(); } - //Close zip file - zf.close(); - if (!StringUtils.equals(sourceDirForZip, sourcedir)) { sourcedir = sourceDirForZip; - System.out.println("Set sourceDir using path inside of Zip: " + sourcedir); - log.info("Set sourceDir using path inside of Zip: " + sourcedir); + logInfo("Set sourceDir using path inside of Zip: " + sourcedir); } return sourcedir; @@ -2022,20 +2063,15 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea final String theFilePath = filepath; final String theInputType = inputType; final String theResumeDir = resumeDir; 
- final boolean useTemplateItem = template; Thread go = new Thread() { @Override public void run() { - Context context = null; - + Context context = new Context(); String importDir = null; EPerson eperson = null; try { - - // create a new dspace context - context = new Context(); eperson = ePersonService.find(context, oldEPerson.getID()); context.setCurrentUser(eperson); context.turnOffAuthorisationSystem(); @@ -2046,7 +2082,8 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea if (theOtherCollections != null) { for (String colID : theOtherCollections) { UUID colId = UUID.fromString(colID); - if (!theOwningCollection.getID().equals(colId)) { + if (theOwningCollection != null + && !theOwningCollection.getID().equals(colId)) { Collection col = collectionService.find(context, colId); if (col != null) { collectionList.add(col); @@ -2065,7 +2102,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea if (!importDirFile.exists()) { boolean success = importDirFile.mkdirs(); if (!success) { - log.info("Cannot create batch import directory!"); + logInfo("Cannot create batch import directory!"); throw new Exception("Cannot create batch import directory!"); } } @@ -2197,14 +2234,14 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea email.send(); } catch (Exception e) { - log.warn(LogHelper.getHeader(context, "emailSuccessMessage", "cannot notify user of import"), e); + logError(LogHelper.getHeader(context, "emailSuccessMessage", "cannot notify user of import"), e); } } @Override public void emailErrorMessage(EPerson eperson, String error) throws MessagingException { - log.warn("An error occurred during item import, the user will be notified. " + error); + logError("An error occurred during item import, the user will be notified. 
" + error); try { Locale supportedLocale = I18nUtil.getEPersonLocale(eperson); Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "bte_batch_import_error")); @@ -2214,7 +2251,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea email.send(); } catch (Exception e) { - log.warn("error during item import error notification", e); + logError("error during item import error notification", e); } } @@ -2292,18 +2329,17 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea + tempDirFile.getAbsolutePath() + " could not be created."); } else { - log.debug("Created directory " + tempDirFile.getAbsolutePath()); + logDebug("Created directory " + tempDirFile.getAbsolutePath()); } } else { - log.debug("Work directory exists: " + tempDirFile.getAbsolutePath()); + logDebug("Work directory exists: " + tempDirFile.getAbsolutePath()); } return tempDirFile; } @Override public void cleanupZipTemp() { - System.out.println("Deleting temporary zip directory: " + tempWorkDir); - log.debug("Deleting temporary zip directory: " + tempWorkDir); + logDebug("Deleting temporary zip directory: " + tempWorkDir); deleteDirectory(new File(tempWorkDir)); } @@ -2312,6 +2348,11 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea this.isTest = isTest; } + @Override + public void setExcludeContent(boolean isExcludeContent) { + this.isExcludeContent = isExcludeContent; + } + @Override public void setResume(boolean isResume) { this.isResume = isResume; @@ -2332,4 +2373,81 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea this.isQuiet = isQuiet; } + @Override + public void setHandler(DSpaceRunnableHandler handler) { + this.handler = handler; + } + + private void logInfo(String message) { + logInfo(message, null); + } + + private void logInfo(String message, Exception e) { + if (handler != null) { + handler.logInfo(message); + return; + } + + if (e != null) { + 
log.info(message, e); + } else { + log.info(message); + } + } + + private void logDebug(String message) { + logDebug(message, null); + } + + private void logDebug(String message, Exception e) { + if (handler != null) { + handler.logDebug(message); + return; + } + + if (e != null) { + log.debug(message, e); + } else { + log.debug(message); + } + } + + private void logWarn(String message) { + logWarn(message, null); + } + + private void logWarn(String message, Exception e) { + if (handler != null) { + handler.logWarning(message); + return; + } + + if (e != null) { + log.warn(message, e); + } else { + log.warn(message); + } + } + + private void logError(String message) { + logError(message, null); + } + + private void logError(String message, Exception e) { + if (handler != null) { + if (e != null) { + handler.logError(message, e); + } else { + handler.logError(message); + } + return; + } + + if (e != null) { + log.error(message, e); + } else { + log.error(message); + } + } + } diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/service/ItemImportService.java b/dspace-api/src/main/java/org/dspace/app/itemimport/service/ItemImportService.java index 2d648e2416..e99ece31b9 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/service/ItemImportService.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/service/ItemImportService.java @@ -16,6 +16,7 @@ import org.dspace.app.itemimport.BatchUpload; import org.dspace.content.Collection; import org.dspace.core.Context; import org.dspace.eperson.EPerson; +import org.dspace.scripts.handler.DSpaceRunnableHandler; /** * Import items into DSpace. The conventional use is upload files by copying @@ -210,6 +211,13 @@ public interface ItemImportService { */ public void setTest(boolean isTest); + /** + * Set exclude-content flag. 
+ * + * @param isExcludeContent true or false + */ + public void setExcludeContent(boolean isExcludeContent); + /** * Set resume flag * @@ -235,4 +243,10 @@ public interface ItemImportService { * @param isQuiet true or false */ public void setQuiet(boolean isQuiet); + + /** + * Set the DSpace Runnable Handler + * @param handler + */ + public void setHandler(DSpaceRunnableHandler handler); } diff --git a/dspace-api/src/main/java/org/dspace/app/itemupdate/AddBitstreamsAction.java b/dspace-api/src/main/java/org/dspace/app/itemupdate/AddBitstreamsAction.java index e9693fb3d1..644745304a 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemupdate/AddBitstreamsAction.java +++ b/dspace-api/src/main/java/org/dspace/app/itemupdate/AddBitstreamsAction.java @@ -77,7 +77,7 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction { ItemUpdate.pr("Contents bitstream count: " + contents.size()); String[] files = dir.list(ItemUpdate.fileFilter); - List fileList = new ArrayList(); + List fileList = new ArrayList<>(); for (String filename : files) { fileList.add(filename); ItemUpdate.pr("file: " + filename); @@ -134,9 +134,6 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction { ItemUpdate.pr("contents entry for bitstream: " + ce.toString()); File f = new File(dir, ce.filename); - // get an input stream - BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f)); - Bitstream bs = null; String newBundleName = ce.bundlename; @@ -173,7 +170,9 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction { targetBundle = bundles.iterator().next(); } - bs = bitstreamService.create(context, targetBundle, bis); + try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f));) { + bs = bitstreamService.create(context, targetBundle, bis); + } bs.setName(context, ce.filename); // Identify the format diff --git a/dspace-api/src/main/java/org/dspace/app/itemupdate/ItemUpdate.java 
b/dspace-api/src/main/java/org/dspace/app/itemupdate/ItemUpdate.java index b6aa875f29..a3fe0b2321 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemupdate/ItemUpdate.java +++ b/dspace-api/src/main/java/org/dspace/app/itemupdate/ItemUpdate.java @@ -39,29 +39,34 @@ import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.handle.service.HandleService; /** - * Provides some batch editing capabilities for items in DSpace: - * Metadata fields - Add, Delete - * Bitstreams - Add, Delete + * Provides some batch editing capabilities for items in DSpace. + *

    + *
  • Metadata fields - Add, Delete
  • + *
  • Bitstreams - Add, Delete
  • + *
* - * The design has been for compatibility with ItemImporter + *

+ * The design has been for compatibility with + * {@link org.dspace.app.itemimport.service.ItemImportService} * in the use of the DSpace archive format which is used to * specify changes on a per item basis. The directory names * to correspond to each item are arbitrary and will only be * used for logging purposes. The reference to the item is - * from a required dc.identifier with the item handle to be - * included in the dublin_core.xml (or similar metadata) file. + * from a required {@code dc.identifier} with the item handle to be + * included in the {@code dublin_core.xml} (or similar metadata) file. * - * Any combination of these actions is permitted in a single run of this class + *

+ * Any combination of these actions is permitted in a single run of this class. * The order of actions is important when used in combination. - * It is the responsibility of the calling class (here, ItemUpdate) - * to register UpdateAction classes in the order to which they are + * It is the responsibility of the calling class (here, {@code ItemUpdate}) + * to register {@link UpdateAction} classes in the order which they are * to be performed. * - * - * It is unfortunate that so much code needs to be borrowed - * from ItemImport as it is not reusable in private methods, etc. - * Some of this has been placed into the MetadataUtilities class - * for possible reuse elsewhere. + *

+ * It is unfortunate that so much code needs to be borrowed from + * {@link org.dspace.app.itemimport.service.ItemImportService} as it is not + * reusable in private methods, etc. Some of this has been placed into the + * {@link MetadataUtilities} class for possible reuse elsewhere. * * @author W. Hays based on a conceptual design by R. Rodgers */ @@ -73,7 +78,7 @@ public class ItemUpdate { public static final String DELETE_CONTENTS_FILE = "delete_contents"; public static String HANDLE_PREFIX = null; - public static final Map filterAliases = new HashMap(); + public static final Map filterAliases = new HashMap<>(); public static boolean verbose = false; @@ -375,7 +380,7 @@ public class ItemUpdate { // open and process the source directory File sourceDir = new File(sourceDirPath); - if ((sourceDir == null) || !sourceDir.exists() || !sourceDir.isDirectory()) { + if (!sourceDir.exists() || !sourceDir.isDirectory()) { pr("Error, cannot open archive source directory " + sourceDirPath); throw new Exception("error with archive source directory " + sourceDirPath); } diff --git a/dspace-api/src/main/java/org/dspace/app/itemupdate/MetadataUtilities.java b/dspace-api/src/main/java/org/dspace/app/itemupdate/MetadataUtilities.java index 5c2138a590..910eb434d1 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemupdate/MetadataUtilities.java +++ b/dspace-api/src/main/java/org/dspace/app/itemupdate/MetadataUtilities.java @@ -27,10 +27,12 @@ import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; import org.apache.commons.lang3.StringUtils; -import org.apache.xpath.XPathAPI; -import org.dspace.authorize.AuthorizeException; import org.dspace.content.Item; import 
org.dspace.content.MetadataField; import org.dspace.content.MetadataSchema; @@ -170,24 +172,21 @@ public class MetadataUtilities { * @param docBuilder DocumentBuilder * @param is - InputStream of dublin_core.xml * @return list of DtoMetadata representing the metadata fields relating to an Item - * @throws SQLException if database error * @throws IOException if IO error * @throws ParserConfigurationException if parser config error * @throws SAXException if XML error - * @throws TransformerException if transformer error - * @throws AuthorizeException if authorization error */ public static List loadDublinCore(DocumentBuilder docBuilder, InputStream is) - throws SQLException, IOException, ParserConfigurationException, - SAXException, TransformerException, AuthorizeException { + throws IOException, XPathExpressionException, SAXException { Document document = docBuilder.parse(is); List dtomList = new ArrayList(); // Get the schema, for backward compatibility we will default to the // dublin core schema if the schema name is not available in the import file - String schema = null; - NodeList metadata = XPathAPI.selectNodeList(document, "/dublin_core"); + String schema; + XPath xPath = XPathFactory.newInstance().newXPath(); + NodeList metadata = (NodeList) xPath.compile("/dublin_core").evaluate(document, XPathConstants.NODESET); Node schemaAttr = metadata.item(0).getAttributes().getNamedItem("schema"); if (schemaAttr == null) { schema = MetadataSchemaEnum.DC.getName(); @@ -196,7 +195,7 @@ public class MetadataUtilities { } // Get the nodes corresponding to formats - NodeList dcNodes = XPathAPI.selectNodeList(document, "/dublin_core/dcvalue"); + NodeList dcNodes = (NodeList) xPath.compile("/dublin_core/dcvalue").evaluate(document, XPathConstants.NODESET); for (int i = 0; i < dcNodes.getLength(); i++) { Node n = dcNodes.item(i); diff --git a/dspace-api/src/main/java/org/dspace/app/launcher/CommandRunner.java 
b/dspace-api/src/main/java/org/dspace/app/launcher/CommandRunner.java index ce33b6655b..06c2ddb483 100644 --- a/dspace-api/src/main/java/org/dspace/app/launcher/CommandRunner.java +++ b/dspace-api/src/main/java/org/dspace/app/launcher/CommandRunner.java @@ -16,7 +16,7 @@ import java.io.StreamTokenizer; import java.util.ArrayList; import java.util.List; -import org.jdom.Document; +import org.jdom2.Document; /** * @author mwood diff --git a/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java b/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java index d445f9bbf3..fcb2098bd0 100644 --- a/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java +++ b/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java @@ -29,9 +29,9 @@ import org.dspace.scripts.service.ScriptService; import org.dspace.servicemanager.DSpaceKernelImpl; import org.dspace.servicemanager.DSpaceKernelInit; import org.dspace.services.RequestService; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.input.SAXBuilder; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.input.SAXBuilder; /** * A DSpace script launcher. diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/Brand.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/Brand.java index 2d963dd3da..9e28edad45 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/Brand.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/Brand.java @@ -21,10 +21,10 @@ import java.awt.image.BufferedImage; */ public class Brand { - private int brandWidth; - private int brandHeight; - private Font font; - private int xOffset; + private final int brandWidth; + private final int brandHeight; + private final Font font; + private final int xOffset; /** * Constructor to set up footer image attributes. 
@@ -92,7 +92,7 @@ public class Brand { * do the text placements and preparatory work for the brand image generation * * @param brandImage a BufferedImage object where the image is created - * @param identifier and Identifier object describing what text is to be placed in what + * @param brandText an Identifier object describing what text is to be placed in what * position within the brand */ private void drawImage(BufferedImage brandImage, diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/BrandText.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/BrandText.java index ae77f6048b..9110740643 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/BrandText.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/BrandText.java @@ -39,7 +39,7 @@ class BrandText { * its location within a rectangular area. * * @param location one of the class location constants e.g. Identifier.BL - * @param the text associated with the location + * @param text text associated with the location */ public BrandText(String location, String text) { this.location = location; diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ExcelFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ExcelFilter.java deleted file mode 100644 index c17d168c04..0000000000 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ExcelFilter.java +++ /dev/null @@ -1,99 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.mediafilter; - -import java.io.InputStream; -import java.nio.charset.StandardCharsets; - -import org.apache.commons.io.IOUtils; -import org.apache.logging.log4j.Logger; -import org.apache.poi.POITextExtractor; -import org.apache.poi.extractor.ExtractorFactory; -import org.apache.poi.hssf.extractor.ExcelExtractor; -import 
org.apache.poi.xssf.extractor.XSSFExcelExtractor; -import org.dspace.content.Item; - -/* - * ExcelFilter - * - * Entries you must add to dspace.cfg: - * - * filter.plugins = blah, \ - * Excel Text Extractor - * - * plugin.named.org.dspace.app.mediafilter.FormatFilter = \ - * blah = blah, \ - * org.dspace.app.mediafilter.ExcelFilter = Excel Text Extractor - * - * #Configure each filter's input Formats - * filter.org.dspace.app.mediafilter.ExcelFilter.inputFormats = Microsoft Excel, Microsoft Excel XML - * - */ -public class ExcelFilter extends MediaFilter { - - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(ExcelFilter.class); - - public String getFilteredName(String oldFilename) { - return oldFilename + ".txt"; - } - - /** - * @return String bundle name - */ - public String getBundleName() { - return "TEXT"; - } - - /** - * @return String bitstream format - */ - public String getFormatString() { - return "Text"; - } - - /** - * @return String description - */ - public String getDescription() { - return "Extracted text"; - } - - /** - * @param item item - * @param source source input stream - * @param verbose verbose mode - * @return InputStream the resulting input stream - * @throws Exception if error - */ - @Override - public InputStream getDestinationStream(Item item, InputStream source, boolean verbose) - throws Exception { - String extractedText = null; - - try { - POITextExtractor theExtractor = ExtractorFactory.createExtractor(source); - if (theExtractor instanceof ExcelExtractor) { - // for xls file - extractedText = (theExtractor).getText(); - } else if (theExtractor instanceof XSSFExcelExtractor) { - // for xlsx file - extractedText = (theExtractor).getText(); - } - } catch (Exception e) { - log.error("Error filtering bitstream: " + e.getMessage(), e); - throw e; - } - - if (extractedText != null) { - // generate an input stream with the extracted text - return IOUtils.toInputStream(extractedText, StandardCharsets.UTF_8); - } - - 
return null; - } -} diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/HTMLFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/HTMLFilter.java deleted file mode 100644 index 5e10f2841d..0000000000 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/HTMLFilter.java +++ /dev/null @@ -1,82 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.mediafilter; - -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; -import javax.swing.text.Document; -import javax.swing.text.html.HTMLEditorKit; - -import org.dspace.content.Item; - -/* - * - * to do: helpful error messages - can't find mediafilter.cfg - can't - * instantiate filter - bitstream format doesn't exist - * - */ -public class HTMLFilter extends MediaFilter { - - @Override - public String getFilteredName(String oldFilename) { - return oldFilename + ".txt"; - } - - /** - * @return String bundle name - */ - @Override - public String getBundleName() { - return "TEXT"; - } - - /** - * @return String bitstream format - */ - @Override - public String getFormatString() { - return "Text"; - } - - /** - * @return String description - */ - @Override - public String getDescription() { - return "Extracted text"; - } - - /** - * @param currentItem item - * @param source source input stream - * @param verbose verbose mode - * @return InputStream the resulting input stream - * @throws Exception if error - */ - @Override - public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose) - throws Exception { - // try and read the document - set to ignore character set directive, - // assuming that the input stream is already set properly (I hope) - HTMLEditorKit kit = new HTMLEditorKit(); - Document doc = 
kit.createDefaultDocument(); - - doc.putProperty("IgnoreCharsetDirective", Boolean.TRUE); - - kit.read(source, doc, 0); - - String extractedText = doc.getText(0, doc.getLength()); - - // generate an input stream with the extracted text - byte[] textBytes = extractedText.getBytes(StandardCharsets.UTF_8); - ByteArrayInputStream bais = new ByteArrayInputStream(textBytes); - - return bais; - } -} diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java index a79fd42d59..d16243e3e3 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java @@ -14,6 +14,9 @@ import java.io.InputStream; import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; +import org.apache.pdfbox.pdmodel.PDDocument; +import org.apache.pdfbox.pdmodel.PDPage; +import org.apache.pdfbox.pdmodel.common.PDRectangle; import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import org.dspace.content.Item; @@ -119,6 +122,39 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter { f2.deleteOnExit(); ConvertCmd cmd = new ConvertCmd(); IMOperation op = new IMOperation(); + + // Optionally override ImageMagick's default density of 72 DPI to use a + // "supersample" when creating the PDF thumbnail. Note that I prefer to + // use the getProperty() method here instead of getIntProperty() because + // the latter always returns an integer (0 in the case it's not set). I + // would prefer to keep ImageMagick's default to itself rather than for + // us to set one. Also note that the density option *must* come before + // we open the input file. 
+ String density = configurationService.getProperty(PRE + ".density"); + if (density != null) { + op.density(Integer.valueOf(density)); + } + + // Check the PDF's MediaBox and CropBox to see if they are the same. + // If not, then tell ImageMagick to use the CropBox when generating + // the thumbnail because the CropBox is generally used to define the + // area displayed when a user opens the PDF on a screen, whereas the + // MediaBox is used for print. Not all PDFs set these correctly, so + // we can use ImageMagick's default behavior unless we see an explicit + // CropBox. Note: we don't need to do anything special to detect if + // the CropBox is missing or empty because pdfbox will set it to the + // same size as the MediaBox if it doesn't exist. Also note that we + // only need to check the first page, since that's what we use for + // generating the thumbnail (PDDocument uses a zero-based index). + PDPage pdfPage = PDDocument.load(f).getPage(0); + PDRectangle pdfPageMediaBox = pdfPage.getMediaBox(); + PDRectangle pdfPageCropBox = pdfPage.getCropBox(); + + // This option must come *before* we open the input file. 
+ if (pdfPageCropBox != pdfPageMediaBox) { + op.define("pdf:use-cropbox=true"); + } + String s = "[" + page + "]"; op.addImage(f.getAbsolutePath() + s); if (configurationService.getBooleanProperty(PRE + ".flatten", true)) { diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java index 49ee23b924..26347c56ee 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java @@ -50,15 +50,11 @@ public class MediaFilterScriptConfiguration extends public Options getOptions() { Options options = new Options(); options.addOption("v", "verbose", false, "print all extracted text and other details to STDOUT"); - options.getOption("v").setType(boolean.class); options.addOption("q", "quiet", false, "do not print anything except in the event of errors."); - options.getOption("q").setType(boolean.class); options.addOption("f", "force", false, "force all bitstreams to be processed"); - options.getOption("f").setType(boolean.class); options.addOption("i", "identifier", true, "ONLY process bitstreams belonging to identifier"); options.addOption("m", "maximum", true, "process no more than maximum items"); options.addOption("h", "help", false, "help"); - options.getOption("h").setType(boolean.class); Option pluginOption = Option.builder("p") .longOpt("plugins") diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/PDFFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/PDFFilter.java deleted file mode 100644 index c90d7c5a6e..0000000000 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/PDFFilter.java +++ /dev/null @@ -1,137 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and 
available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.mediafilter; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.InputStream; -import java.io.OutputStreamWriter; -import java.io.Writer; - -import org.apache.logging.log4j.Logger; -import org.apache.pdfbox.pdmodel.PDDocument; -import org.apache.pdfbox.pdmodel.encryption.InvalidPasswordException; -import org.apache.pdfbox.text.PDFTextStripper; -import org.dspace.content.Item; -import org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; - -/* - * - * to do: helpful error messages - can't find mediafilter.cfg - can't - * instantiate filter - bitstream format doesn't exist - * - */ -public class PDFFilter extends MediaFilter { - - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PDFFilter.class); - - @Override - public String getFilteredName(String oldFilename) { - return oldFilename + ".txt"; - } - - /** - * @return String bundle name - */ - @Override - public String getBundleName() { - return "TEXT"; - } - - /** - * @return String bitstreamformat - */ - @Override - public String getFormatString() { - return "Text"; - } - - /** - * @return String description - */ - @Override - public String getDescription() { - return "Extracted text"; - } - - /** - * @param currentItem item - * @param source source input stream - * @param verbose verbose mode - * @return InputStream the resulting input stream - * @throws Exception if error - */ - @Override - public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose) - throws Exception { - ConfigurationService configurationService - = DSpaceServicesFactory.getInstance().getConfigurationService(); - try { - boolean useTemporaryFile = configurationService.getBooleanProperty("pdffilter.largepdfs", false); - - // 
get input stream from bitstream - // pass to filter, get string back - PDFTextStripper pts = new PDFTextStripper(); - pts.setSortByPosition(true); - PDDocument pdfDoc = null; - Writer writer = null; - File tempTextFile = null; - ByteArrayOutputStream byteStream = null; - - if (useTemporaryFile) { - tempTextFile = File.createTempFile("dspacepdfextract" + source.hashCode(), ".txt"); - tempTextFile.deleteOnExit(); - writer = new OutputStreamWriter(new FileOutputStream(tempTextFile)); - } else { - byteStream = new ByteArrayOutputStream(); - writer = new OutputStreamWriter(byteStream); - } - - try { - pdfDoc = PDDocument.load(source); - pts.writeText(pdfDoc, writer); - } catch (InvalidPasswordException ex) { - log.error("PDF is encrypted. Cannot extract text (item: {})", - () -> currentItem.getHandle()); - return null; - } finally { - try { - if (pdfDoc != null) { - pdfDoc.close(); - } - } catch (Exception e) { - log.error("Error closing PDF file: " + e.getMessage(), e); - } - - try { - writer.close(); - } catch (Exception e) { - log.error("Error closing temporary extract file: " + e.getMessage(), e); - } - } - - if (useTemporaryFile) { - return new FileInputStream(tempTextFile); - } else { - byte[] bytes = byteStream.toByteArray(); - return new ByteArrayInputStream(bytes); - } - } catch (OutOfMemoryError oome) { - log.error("Error parsing PDF document " + oome.getMessage(), oome); - if (!configurationService.getBooleanProperty("pdffilter.skiponmemoryexception", false)) { - throw oome; - } - } - - return null; - } -} diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/PoiWordFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/PoiWordFilter.java deleted file mode 100644 index 8c198c4477..0000000000 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/PoiWordFilter.java +++ /dev/null @@ -1,72 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the 
source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.mediafilter; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; - -import org.apache.poi.POITextExtractor; -import org.apache.poi.extractor.ExtractorFactory; -import org.apache.poi.openxml4j.exceptions.OpenXML4JException; -import org.apache.xmlbeans.XmlException; -import org.dspace.content.Item; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Extract flat text from Microsoft Word documents (.doc, .docx). - */ -public class PoiWordFilter - extends MediaFilter { - private static final Logger LOG = LoggerFactory.getLogger(PoiWordFilter.class); - - @Override - public String getFilteredName(String oldFilename) { - return oldFilename + ".txt"; - } - - @Override - public String getBundleName() { - return "TEXT"; - } - - @Override - public String getFormatString() { - return "Text"; - } - - @Override - public String getDescription() { - return "Extracted text"; - } - - @Override - public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose) - throws Exception { - String text; - try { - // get input stream from bitstream, pass to filter, get string back - POITextExtractor extractor = ExtractorFactory.createExtractor(source); - text = extractor.getText(); - } catch (IOException | OpenXML4JException | XmlException e) { - System.err.format("Invalid File Format: %s%n", e.getMessage()); - LOG.error("Unable to parse the bitstream: ", e); - throw e; - } - - // if verbose flag is set, print out extracted text to STDOUT - if (verbose) { - System.out.println(text); - } - - // return the extracted text as a stream. 
- return new ByteArrayInputStream(text.getBytes(StandardCharsets.UTF_8)); - } -} diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/PowerPointFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/PowerPointFilter.java deleted file mode 100644 index 86b7096f68..0000000000 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/PowerPointFilter.java +++ /dev/null @@ -1,113 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.mediafilter; - -import java.io.ByteArrayInputStream; -import java.io.InputStream; - -import org.apache.logging.log4j.Logger; -import org.apache.poi.POITextExtractor; -import org.apache.poi.extractor.ExtractorFactory; -import org.apache.poi.hslf.extractor.PowerPointExtractor; -import org.apache.poi.xslf.extractor.XSLFPowerPointExtractor; -import org.dspace.content.Item; - -/* - * TODO: Allow user to configure extraction of only text or only notes - * - */ -public class PowerPointFilter extends MediaFilter { - - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PowerPointFilter.class); - - @Override - public String getFilteredName(String oldFilename) { - return oldFilename + ".txt"; - } - - /** - * @return String bundle name - */ - @Override - public String getBundleName() { - return "TEXT"; - } - - /** - * @return String bitstream format - * - * TODO: Check that this is correct - */ - @Override - public String getFormatString() { - return "Text"; - } - - /** - * @return String description - */ - @Override - public String getDescription() { - return "Extracted text"; - } - - /** - * @param currentItem item - * @param source source input stream - * @param verbose verbose mode - * @return InputStream the resulting input stream - * @throws Exception if error - */ - @Override - public InputStream 
getDestinationStream(Item currentItem, InputStream source, boolean verbose) - throws Exception { - - try { - - String extractedText = null; - new ExtractorFactory(); - POITextExtractor pptExtractor = ExtractorFactory - .createExtractor(source); - - // PowerPoint XML files and legacy format PowerPoint files - // require different classes and APIs for text extraction - - // If this is a PowerPoint XML file, extract accordingly - if (pptExtractor instanceof XSLFPowerPointExtractor) { - - // The true method arguments indicate that text from - // the slides and the notes is desired - extractedText = ((XSLFPowerPointExtractor) pptExtractor) - .getText(true, true); - } else if (pptExtractor instanceof PowerPointExtractor) { // Legacy PowerPoint files - - extractedText = ((PowerPointExtractor) pptExtractor).getText() - + " " + ((PowerPointExtractor) pptExtractor).getNotes(); - - } - if (extractedText != null) { - // if verbose flag is set, print out extracted text - // to STDOUT - if (verbose) { - System.out.println(extractedText); - } - - // generate an input stream with the extracted text - byte[] textBytes = extractedText.getBytes(); - ByteArrayInputStream bais = new ByteArrayInputStream(textBytes); - - return bais; - } - } catch (Exception e) { - log.error("Error filtering bitstream: " + e.getMessage(), e); - throw e; - } - - return null; - } -} diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/TikaTextExtractionFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/TikaTextExtractionFilter.java new file mode 100644 index 0000000000..e83bf706ed --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/TikaTextExtractionFilter.java @@ -0,0 +1,183 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.mediafilter; + +import 
java.io.ByteArrayInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; + +import org.apache.commons.lang.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.tika.Tika; +import org.apache.tika.exception.TikaException; +import org.apache.tika.metadata.Metadata; +import org.apache.tika.parser.AutoDetectParser; +import org.apache.tika.sax.BodyContentHandler; +import org.apache.tika.sax.ContentHandlerDecorator; +import org.dspace.content.Item; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.xml.sax.SAXException; + +/** + * Text Extraction media filter which uses Apache Tika to extract text from a large number of file formats (including + * all Microsoft formats, PDF, HTML, Text, etc). For a more complete list of file formats supported by Tika see the + * Tika documentation: https://tika.apache.org/2.3.0/formats.html + */ +public class TikaTextExtractionFilter + extends MediaFilter { + private final static Logger log = LogManager.getLogger(); + + @Override + public String getFilteredName(String oldFilename) { + return oldFilename + ".txt"; + } + + @Override + public String getBundleName() { + return "TEXT"; + } + + @Override + public String getFormatString() { + return "Text"; + } + + @Override + public String getDescription() { + return "Extracted text"; + } + + @Override + public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose) + throws Exception { + ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + boolean useTemporaryFile = configurationService.getBooleanProperty("textextractor.use-temp-file", false); + + if (useTemporaryFile) { + // Extract text out of source file using a temp file, returning 
results as InputStream + return extractUsingTempFile(source, verbose); + } + + // Not using temporary file. We'll use Tika's default in-memory parsing. + // Get maximum characters to extract. Default is 100,000 chars, which is also Tika's default setting. + String extractedText; + int maxChars = configurationService.getIntProperty("textextractor.max-chars", 100000); + try { + // Use Tika to extract text from input. Tika will automatically detect the file type. + Tika tika = new Tika(); + tika.setMaxStringLength(maxChars); // Tell Tika the maximum number of characters to extract + extractedText = tika.parseToString(source); + } catch (IOException e) { + System.err.format("Unable to extract text from bitstream in Item %s%n", currentItem.getID().toString()); + e.printStackTrace(); + log.error("Unable to extract text from bitstream in Item {}", currentItem.getID().toString(), e); + throw e; + } catch (OutOfMemoryError oe) { + System.err.format("OutOfMemoryError occurred when extracting text from bitstream in Item %s. " + + "You may wish to enable 'textextractor.use-temp-file'.%n", currentItem.getID().toString()); + oe.printStackTrace(); + log.error("OutOfMemoryError occurred when extracting text from bitstream in Item {}. " + + "You may wish to enable 'textextractor.use-temp-file'.", currentItem.getID().toString(), oe); + throw oe; + } + + if (StringUtils.isNotEmpty(extractedText)) { + // if verbose flag is set, print out extracted text to STDOUT + if (verbose) { + System.out.println("(Verbose mode) Extracted text:"); + System.out.println(extractedText); + } + + // return the extracted text as a UTF-8 stream. + return new ByteArrayInputStream(extractedText.getBytes(StandardCharsets.UTF_8)); + } + return null; + } + + /** + * Extracts the text out of a given source InputStream, using a temporary file. This decreases the amount of memory + * necessary for text extraction, but can be slower as it requires writing extracted text to a temporary file. 
+ * @param source source InputStream + * @param verbose verbose mode enabled/disabled + * @return InputStream for temporary file containing extracted text + * @throws IOException + * @throws SAXException + * @throws TikaException + */ + private InputStream extractUsingTempFile(InputStream source, boolean verbose) + throws IOException, TikaException, SAXException { + File tempExtractedTextFile = File.createTempFile("dspacetextextract" + source.hashCode(), ".txt"); + + if (verbose) { + System.out.println("(Verbose mode) Extracted text was written to temporary file at " + + tempExtractedTextFile.getAbsolutePath()); + } else { + tempExtractedTextFile.deleteOnExit(); + } + + // Open temp file for writing + try (FileWriter writer = new FileWriter(tempExtractedTextFile, StandardCharsets.UTF_8)) { + // Initialize a custom ContentHandlerDecorator which is a BodyContentHandler. + // This mimics the behavior of Tika().parseToString(), which only extracts text from the body of the file. + // This custom Handler writes any extracted text to the temp file. + ContentHandlerDecorator handler = new BodyContentHandler(new ContentHandlerDecorator() { + /** + * Write all extracted characters directly to the temp file. + */ + @Override + public void characters(char[] ch, int start, int length) throws SAXException { + try { + writer.append(new String(ch), start, length); + } catch (IOException e) { + String errorMsg = String.format("Could not append to temporary file at %s " + + "when performing text extraction", + tempExtractedTextFile.getAbsolutePath()); + log.error(errorMsg, e); + throw new SAXException(errorMsg, e); + } + } + + /** + * Write all ignorable whitespace directly to the temp file. + * This mimics the behaviour of Tika().parseToString() which extracts ignorableWhitespace characters + * (like blank lines, indentations, etc.), so that we get the same extracted text either way. 
+ */ + @Override + public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException { + try { + writer.append(new String(ch), start, length); + } catch (IOException e) { + String errorMsg = String.format("Could not append to temporary file at %s " + + "when performing text extraction", + tempExtractedTextFile.getAbsolutePath()); + log.error(errorMsg, e); + throw new SAXException(errorMsg, e); + } + } + }); + + AutoDetectParser parser = new AutoDetectParser(); + Metadata metadata = new Metadata(); + // parse our source InputStream using the above custom handler + parser.parse(source, handler, metadata); + } + + // At this point, all extracted text is written to our temp file. So, return a FileInputStream for that file + return new FileInputStream(tempExtractedTextFile); + } + + + + +} diff --git a/dspace-api/src/main/java/org/dspace/app/packager/Packager.java b/dspace-api/src/main/java/org/dspace/app/packager/Packager.java index 0e985bd244..21d1562686 100644 --- a/dspace-api/src/main/java/org/dspace/app/packager/Packager.java +++ b/dspace-api/src/main/java/org/dspace/app/packager/Packager.java @@ -631,7 +631,7 @@ public class Packager { //otherwise, just disseminate a single object to a single package file dip.disseminate(context, dso, pkgParams, pkgFile); - if (pkgFile != null && pkgFile.exists()) { + if (pkgFile.exists()) { System.out.println("\nCREATED package file: " + pkgFile.getCanonicalPath()); } } diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/CollectionAdministratorsRequestItemStrategy.java b/dspace-api/src/main/java/org/dspace/app/requestitem/CollectionAdministratorsRequestItemStrategy.java new file mode 100644 index 0000000000..135406069a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/CollectionAdministratorsRequestItemStrategy.java @@ -0,0 +1,46 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the 
source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.app.requestitem; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; + +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.springframework.lang.NonNull; + +/** + * Derive request recipients from groups of the Collection which owns an Item. + * The list will include all members of the administrators group. If the + * resulting list is empty, delegates to {@link RequestItemHelpdeskStrategy}. + * + * @author Mark H. Wood + */ +public class CollectionAdministratorsRequestItemStrategy + extends RequestItemHelpdeskStrategy { + @Override + @NonNull + public List getRequestItemAuthor(Context context, + Item item) + throws SQLException { + List recipients = new ArrayList<>(); + Collection collection = item.getOwningCollection(); + for (EPerson admin : collection.getAdministrators().getMembers()) { + recipients.add(new RequestItemAuthor(admin)); + } + if (recipients.isEmpty()) { + return super.getRequestItemAuthor(context, item); + } else { + return recipients; + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/CombiningRequestItemStrategy.java b/dspace-api/src/main/java/org/dspace/app/requestitem/CombiningRequestItemStrategy.java new file mode 100644 index 0000000000..8292c1a728 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/CombiningRequestItemStrategy.java @@ -0,0 +1,61 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.requestitem; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.core.Context; 
+import org.springframework.lang.NonNull; +import org.springframework.util.Assert; + +/** + * Assemble a list of recipients from the results of other strategies. + * The list of strategy classes is injected as the constructor argument + * {@code strategies}. + * If the strategy list is not configured, returns an empty List. + * + * @author Mark H. Wood + */ +public class CombiningRequestItemStrategy + implements RequestItemAuthorExtractor { + /** The strategies to combine. */ + private final List strategies; + + /** + * Initialize a combination of strategies. + * @param strategies the author extraction strategies to combine. + */ + public CombiningRequestItemStrategy(@NonNull List strategies) { + Assert.notNull(strategies, "Strategy list may not be null"); + this.strategies = strategies; + } + + /** + * Do not call. + * @throws IllegalArgumentException always + */ + private CombiningRequestItemStrategy() { + throw new IllegalArgumentException(); + } + + @Override + @NonNull + public List getRequestItemAuthor(Context context, Item item) + throws SQLException { + List recipients = new ArrayList<>(); + + for (RequestItemAuthorExtractor strategy : strategies) { + recipients.addAll(strategy.getRequestItemAuthor(context, item)); + } + + return recipients; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItem.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItem.java index 9e675e97a7..cdefd1298c 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItem.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItem.java @@ -27,7 +27,7 @@ import org.dspace.core.Context; import org.dspace.core.ReloadableEntity; /** - * Object representing an Item Request + * Object representing an Item Request. 
*/ @Entity @Table(name = "requestitem") @@ -94,6 +94,9 @@ public class RequestItem implements ReloadableEntity { this.allfiles = allfiles; } + /** + * @return {@code true} if all of the Item's files are requested. + */ public boolean isAllfiles() { return allfiles; } @@ -102,6 +105,9 @@ public class RequestItem implements ReloadableEntity { this.reqMessage = reqMessage; } + /** + * @return a message from the requester. + */ public String getReqMessage() { return reqMessage; } @@ -110,6 +116,9 @@ public class RequestItem implements ReloadableEntity { this.reqName = reqName; } + /** + * @return Human-readable name of the user requesting access. + */ public String getReqName() { return reqName; } @@ -118,6 +127,9 @@ public class RequestItem implements ReloadableEntity { this.reqEmail = reqEmail; } + /** + * @return address of the user requesting access. + */ public String getReqEmail() { return reqEmail; } @@ -126,6 +138,9 @@ public class RequestItem implements ReloadableEntity { this.token = token; } + /** + * @return a unique request identifier which can be emailed. + */ public String getToken() { return token; } diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthor.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthor.java index 49e26fe00b..a189e4a5ef 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthor.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthor.java @@ -11,20 +11,31 @@ import org.dspace.eperson.EPerson; /** * Simple DTO to transfer data about the corresponding author for the Request - * Copy feature + * Copy feature. * * @author Andrea Bollini */ public class RequestItemAuthor { - private String fullName; - private String email; + private final String fullName; + private final String email; + /** + * Construct an author record from given data. + * + * @param fullName the author's full name. + * @param email the author's email address. 
+ */ public RequestItemAuthor(String fullName, String email) { super(); this.fullName = fullName; this.email = email; } + /** + * Construct an author from an EPerson's metadata. + * + * @param ePerson the EPerson. + */ public RequestItemAuthor(EPerson ePerson) { super(); this.fullName = ePerson.getFullName(); diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthorExtractor.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthorExtractor.java index 9b66030e90..bc97bc64bf 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthorExtractor.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthorExtractor.java @@ -8,26 +8,28 @@ package org.dspace.app.requestitem; import java.sql.SQLException; +import java.util.List; import org.dspace.content.Item; import org.dspace.core.Context; +import org.springframework.lang.NonNull; /** - * Interface to abstract the strategy for select the author to contact for - * request copy + * Interface to abstract the strategy for selecting the author to contact for + * request copy. * * @author Andrea Bollini */ public interface RequestItemAuthorExtractor { - /** - * Retrieve the auhtor to contact for a request copy of the give item. + * Retrieve the author to contact for requesting a copy of the given item. * * @param context DSpace context object * @param item item to request - * @return An object containing name an email address to send the request to - * or null if no valid email address was found. + * @return Names and email addresses to send the request to. 
* @throws SQLException if database error */ - public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException; + @NonNull + public List getRequestItemAuthor(Context context, Item item) + throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java index d72e42eac1..435fa0f9cc 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java @@ -72,28 +72,48 @@ public class RequestItemEmailNotifier { static public void sendRequest(Context context, RequestItem ri, String responseLink) throws IOException, SQLException { // Who is making this request? - RequestItemAuthor author = requestItemAuthorExtractor + List authors = requestItemAuthorExtractor .getRequestItemAuthor(context, ri.getItem()); - String authorEmail = author.getEmail(); - String authorName = author.getFullName(); // Build an email to the approver. Email email = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(), "request_item.author")); - email.addRecipient(authorEmail); + for (RequestItemAuthor author : authors) { + email.addRecipient(author.getEmail()); + } email.setReplyTo(ri.getReqEmail()); // Requester's address + email.addArgument(ri.getReqName()); // {0} Requester's name + email.addArgument(ri.getReqEmail()); // {1} Requester's address + email.addArgument(ri.isAllfiles() // {2} All bitstreams or just one? ? 
I18nUtil.getMessage("itemRequest.all") : ri.getBitstream().getName()); - email.addArgument(handleService.getCanonicalForm(ri.getItem().getHandle())); + + email.addArgument(handleService.getCanonicalForm(ri.getItem().getHandle())); // {3} + email.addArgument(ri.getItem().getName()); // {4} requested item's title + email.addArgument(ri.getReqMessage()); // {5} message from requester + email.addArgument(responseLink); // {6} Link back to DSpace for action - email.addArgument(authorName); // {7} corresponding author name - email.addArgument(authorEmail); // {8} corresponding author email - email.addArgument(configurationService.getProperty("dspace.name")); - email.addArgument(configurationService.getProperty("mail.helpdesk")); + + StringBuilder names = new StringBuilder(); + StringBuilder addresses = new StringBuilder(); + for (RequestItemAuthor author : authors) { + if (names.length() > 0) { + names.append("; "); + addresses.append("; "); + } + names.append(author.getFullName()); + addresses.append(author.getEmail()); + } + email.addArgument(names.toString()); // {7} corresponding author name + email.addArgument(addresses.toString()); // {8} corresponding author email + + email.addArgument(configurationService.getProperty("dspace.name")); // {9} + + email.addArgument(configurationService.getProperty("mail.helpdesk")); // {10} // Send the email. try { @@ -134,9 +154,9 @@ public class RequestItemEmailNotifier { email.setContent("body", message); email.setSubject(subject); email.addRecipient(ri.getReqEmail()); - if (ri.isAccept_request()) { - // Attach bitstreams. - try { + // Attach bitstreams. 
+ try { + if (ri.isAccept_request()) { if (ri.isAllfiles()) { Item item = ri.getItem(); List bundles = item.getBundles("ORIGINAL"); @@ -159,11 +179,19 @@ public class RequestItemEmailNotifier { bitstream.getFormat(context).getMIMEType()); } email.send(); - } catch (MessagingException | IOException | SQLException | AuthorizeException e) { - LOG.warn(LogHelper.getHeader(context, - "error_mailing_requestItem", e.getMessage())); - throw new IOException("Reply not sent: " + e.getMessage()); + } else { + boolean sendRejectEmail = configurationService + .getBooleanProperty("request.item.reject.email", true); + // Not all sites want the "refusal" to be sent back to the requester via + // email. However, by default, the rejection email is sent back. + if (sendRejectEmail) { + email.send(); + } } + } catch (MessagingException | IOException | SQLException | AuthorizeException e) { + LOG.warn(LogHelper.getHeader(context, + "error_mailing_requestItem", e.getMessage())); + throw new IOException("Reply not sent: " + e.getMessage()); } LOG.info(LogHelper.getHeader(context, "sent_attach_requestItem", "token={}"), ri.getToken()); diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategy.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategy.java index 7b63d3ea8d..f440ba380a 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategy.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategy.java @@ -8,6 +8,8 @@ package org.dspace.app.requestitem; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; import org.apache.commons.lang3.StringUtils; import org.dspace.content.Item; @@ -16,11 +18,11 @@ import org.dspace.core.I18nUtil; import org.dspace.eperson.EPerson; import org.dspace.eperson.service.EPersonService; import org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.lang.NonNull; /** - * RequestItem strategy to allow DSpace support team's helpdesk to receive requestItem request + * RequestItem strategy to allow DSpace support team's helpdesk to receive requestItem request. * With this enabled, then the Item author/submitter doesn't receive the request, but the helpdesk instead does. * * Failover to the RequestItemSubmitterStrategy, which means the submitter would get the request if there is no @@ -33,19 +35,24 @@ public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy { @Autowired(required = true) protected EPersonService ePersonService; + @Autowired(required = true) + private ConfigurationService configuration; + public RequestItemHelpdeskStrategy() { } @Override - public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException { - ConfigurationService configurationService - = DSpaceServicesFactory.getInstance().getConfigurationService(); - boolean helpdeskOverridesSubmitter = configurationService + @NonNull + public List getRequestItemAuthor(Context context, Item item) + throws SQLException { + boolean helpdeskOverridesSubmitter = configuration .getBooleanProperty("request.item.helpdesk.override", false); - String helpDeskEmail = configurationService.getProperty("mail.helpdesk"); + String helpDeskEmail = configuration.getProperty("mail.helpdesk"); if (helpdeskOverridesSubmitter && StringUtils.isNotBlank(helpDeskEmail)) { - return getHelpDeskPerson(context, helpDeskEmail); + List authors = new ArrayList<>(1); + authors.add(getHelpDeskPerson(context, helpDeskEmail)); + return authors; } else { //Fallback to default logic (author of Item) if helpdesk isn't fully enabled or setup return super.getRequestItemAuthor(context, item); diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemMetadataStrategy.java 
b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemMetadataStrategy.java index 9838e58697..4372ab9b09 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemMetadataStrategy.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemMetadataStrategy.java @@ -8,6 +8,8 @@ package org.dspace.app.requestitem; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collections; import java.util.List; import org.apache.commons.lang3.StringUtils; @@ -16,12 +18,13 @@ import org.dspace.content.MetadataValue; import org.dspace.content.service.ItemService; import org.dspace.core.Context; import org.dspace.core.I18nUtil; -import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.lang.NonNull; /** * Try to look to an item metadata for the corresponding author name and email. - * Failover to the RequestItemSubmitterStrategy + * Failover to the RequestItemSubmitterStrategy. 
* * @author Andrea Bollini */ @@ -30,6 +33,9 @@ public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy { protected String emailMetadata; protected String fullNameMetadata; + @Autowired(required = true) + protected ConfigurationService configurationService; + @Autowired(required = true) protected ItemService itemService; @@ -37,59 +43,72 @@ public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy { } @Override - public RequestItemAuthor getRequestItemAuthor(Context context, Item item) + @NonNull + public List getRequestItemAuthor(Context context, Item item) throws SQLException { - RequestItemAuthor author = null; + List authors; if (emailMetadata != null) { List vals = itemService.getMetadataByMetadataString(item, emailMetadata); - if (vals.size() > 0) { - String email = vals.iterator().next().getValue(); - String fullname = null; - if (fullNameMetadata != null) { - List nameVals = itemService.getMetadataByMetadataString(item, fullNameMetadata); - if (nameVals.size() > 0) { - fullname = nameVals.iterator().next().getValue(); + List nameVals; + if (null != fullNameMetadata) { + nameVals = itemService.getMetadataByMetadataString(item, fullNameMetadata); + } else { + nameVals = Collections.EMPTY_LIST; + } + boolean useNames = vals.size() == nameVals.size(); + if (!vals.isEmpty()) { + authors = new ArrayList<>(vals.size()); + for (int authorIndex = 0; authorIndex < vals.size(); authorIndex++) { + String email = vals.get(authorIndex).getValue(); + String fullname = null; + if (useNames) { + fullname = nameVals.get(authorIndex).getValue(); } + + if (StringUtils.isBlank(fullname)) { + fullname = I18nUtil.getMessage( + "org.dspace.app.requestitem.RequestItemMetadataStrategy.unnamed", + context); + } + RequestItemAuthor author = new RequestItemAuthor( + fullname, email); + authors.add(author); } - if (StringUtils.isBlank(fullname)) { - fullname = I18nUtil - .getMessage( - 
"org.dspace.app.requestitem.RequestItemMetadataStrategy.unnamed", - context); - } - author = new RequestItemAuthor(fullname, email); - return author; + return authors; + } else { + return Collections.EMPTY_LIST; } } else { // Uses the basic strategy to look for the original submitter - author = super.getRequestItemAuthor(context, item); - // Is the author or his email null, so get the help desk or admin name and email - if (null == author || null == author.getEmail()) { - String email = null; - String name = null; + authors = super.getRequestItemAuthor(context, item); + + // Remove from the list authors that do not have email addresses. + for (RequestItemAuthor author : authors) { + if (null == author.getEmail()) { + authors.remove(author); + } + } + + if (authors.isEmpty()) { // No author email addresses! Fall back //First get help desk name and email - email = DSpaceServicesFactory.getInstance() - .getConfigurationService().getProperty("mail.helpdesk"); - name = DSpaceServicesFactory.getInstance() - .getConfigurationService().getProperty("mail.helpdesk.name"); + String email = configurationService.getProperty("mail.helpdesk"); + String name = configurationService.getProperty("mail.helpdesk.name"); // If help desk mail is null get the mail and name of admin if (email == null) { - email = DSpaceServicesFactory.getInstance() - .getConfigurationService().getProperty("mail.admin"); - name = DSpaceServicesFactory.getInstance() - .getConfigurationService().getProperty("mail.admin.name"); + email = configurationService.getProperty("mail.admin"); + name = configurationService.getProperty("mail.admin.name"); } - author = new RequestItemAuthor(name, email); + authors.add(new RequestItemAuthor(name, email)); } + return authors; } - return author; } - public void setEmailMetadata(String emailMetadata) { + public void setEmailMetadata(@NonNull String emailMetadata) { this.emailMetadata = emailMetadata; } - public void setFullNameMetadata(String fullNameMetadata) { + public 
void setFullNameMetadata(@NonNull String fullNameMetadata) { this.fullNameMetadata = fullNameMetadata; } diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemSubmitterStrategy.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemSubmitterStrategy.java index 2708c24ba9..dcc1a3e80e 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemSubmitterStrategy.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemSubmitterStrategy.java @@ -8,10 +8,13 @@ package org.dspace.app.requestitem; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; import org.dspace.content.Item; import org.dspace.core.Context; import org.dspace.eperson.EPerson; +import org.springframework.lang.NonNull; /** * Basic strategy that looks to the original submitter. @@ -24,21 +27,23 @@ public class RequestItemSubmitterStrategy implements RequestItemAuthorExtractor } /** - * Returns the submitter of an Item as RequestItemAuthor or null if the - * Submitter is deleted. + * Returns the submitter of an Item as RequestItemAuthor or an empty List if + * the Submitter is deleted. 
* - * @return The submitter of the item or null if the submitter is deleted + * @return The submitter of the item or empty List if the submitter is deleted * @throws SQLException if database error */ @Override - public RequestItemAuthor getRequestItemAuthor(Context context, Item item) + @NonNull + public List getRequestItemAuthor(Context context, Item item) throws SQLException { EPerson submitter = item.getSubmitter(); - RequestItemAuthor author = null; + List authors = new ArrayList<>(1); if (null != submitter) { - author = new RequestItemAuthor( - submitter.getFullName(), submitter.getEmail()); + RequestItemAuthor author = new RequestItemAuthor( + submitter.getFullName(), submitter.getEmail()); + authors.add(author); } - return author; + return authors; } } diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/SHERPAService.java b/dspace-api/src/main/java/org/dspace/app/sherpa/SHERPAService.java index 87198fe172..ead725e842 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/SHERPAService.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/SHERPAService.java @@ -31,6 +31,7 @@ import org.dspace.app.sherpa.v2.SHERPAResponse; import org.dspace.app.sherpa.v2.SHERPAUtils; import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.cache.annotation.Cacheable; /** * SHERPAService is responsible for making the HTTP call to the SHERPA v2 API @@ -43,6 +44,7 @@ import org.springframework.beans.factory.annotation.Autowired; * @author Kim Shepherd */ public class SHERPAService { + private CloseableHttpClient client = null; private int maxNumberOfTries; @@ -91,6 +93,7 @@ public class SHERPAService { * @param query ISSN string to pass in an "issn equals" API query * @return SHERPAResponse containing an error or journal policies */ + @Cacheable(key = "#query", cacheNames = "sherpa.searchByJournalISSN") public SHERPAResponse searchByJournalISSN(String query) { return 
performRequest("publication", "issn", "equals", query, 0, 1); } @@ -413,4 +416,5 @@ public class SHERPAService { public void setTimeout(int timeout) { this.timeout = timeout; } -} + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/cache/SherpaCacheEvictService.java b/dspace-api/src/main/java/org/dspace/app/sherpa/cache/SherpaCacheEvictService.java new file mode 100644 index 0000000000..94ecfb5e21 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/cache/SherpaCacheEvictService.java @@ -0,0 +1,71 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.sherpa.cache; + +import java.util.Objects; +import java.util.Set; + +import org.dspace.app.sherpa.submit.SHERPASubmitService; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.springframework.cache.CacheManager; + +/** + * This service is responsible to deal with the SherpaService cache. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class SherpaCacheEvictService { + + // The cache that is managed by this service. 
+ static final String CACHE_NAME = "sherpa.searchByJournalISSN"; + + private CacheManager cacheManager; + + private SHERPASubmitService sherpaSubmitService; + + /** + * Remove immediately from the cache all the response that are related to a specific item + * extracting the ISSNs from the item + * + * @param context The DSpace context + * @param item an Item + */ + public void evictCacheValues(Context context, Item item) { + Set ISSNs = sherpaSubmitService.getISSNs(context, item); + for (String issn : ISSNs) { + Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).evictIfPresent(issn); + } + } + + /** + * Invalidate immediately the Sherpa cache + */ + public void evictAllCacheValues() { + Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).invalidate(); + } + + /** + * Set the reference to the cacheManager + * + * @param cacheManager + */ + public void setCacheManager(CacheManager cacheManager) { + this.cacheManager = cacheManager; + } + + /** + * Set the reference to the SherpaSubmitService + * + * @param sherpaSubmitService + */ + public void setSherpaSubmitService(SHERPASubmitService sherpaSubmitService) { + this.sherpaSubmitService = sherpaSubmitService; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/cache/SherpaCacheLogger.java b/dspace-api/src/main/java/org/dspace/app/sherpa/cache/SherpaCacheLogger.java new file mode 100644 index 0000000000..e84fb7775a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/cache/SherpaCacheLogger.java @@ -0,0 +1,34 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.sherpa.cache; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.ehcache.event.CacheEvent; +import org.ehcache.event.CacheEventListener; + +/** + * 
This is a EHCache listner responsible for logging sherpa cache events. It is + * bound to the sherpa cache via the dspace/config/ehcache.xml file. We need a + * dedicated Logger for each cache as the CacheEvent doesn't include details + * about where the event occur + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + * + */ +public class SherpaCacheLogger implements CacheEventListener { + + private static final Logger log = LogManager.getLogger(SherpaCacheLogger.class); + + @Override + public void onEvent(CacheEvent cacheEvent) { + log.debug("Sherpa Cache Event Type: {} | Key: {} ", + cacheEvent.getType(), cacheEvent.getKey()); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/submit/SHERPASubmitService.java b/dspace-api/src/main/java/org/dspace/app/sherpa/submit/SHERPASubmitService.java index f34e2b6d57..b795c8a2b2 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/submit/SHERPASubmitService.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/submit/SHERPASubmitService.java @@ -9,7 +9,6 @@ package org.dspace.app.sherpa.submit; import java.util.Iterator; import java.util.LinkedHashSet; -import java.util.LinkedList; import java.util.List; import java.util.Set; @@ -63,19 +62,19 @@ public class SHERPASubmitService { * issnItemExtractor(s) in the SHERPA spring configuration. * The ISSNs are not validated with a regular expression or other rules - any values * extracted will be included in API queries. 
+ * Return the first not empty response from Sherpa * @see "dspace-dspace-addon-sherpa-configuration-services.xml" * @param context DSpace context * @param item DSpace item containing ISSNs to be checked * @return SHERPA v2 API response (policy data) */ - public List searchRelatedJournals(Context context, Item item) { + public SHERPAResponse searchRelatedJournals(Context context, Item item) { Set issns = getISSNs(context, item); if (issns == null || issns.size() == 0) { return null; } else { // SHERPA v2 API no longer supports "OR'd" ISSN search, perform individual searches instead Iterator issnIterator = issns.iterator(); - List responses = new LinkedList<>(); while (issnIterator.hasNext()) { String issn = issnIterator.next(); SHERPAResponse response = sherpaService.searchByJournalISSN(issn); @@ -83,14 +82,13 @@ public class SHERPASubmitService { // Continue with loop log.warn("Failed to look up SHERPA ROMeO result for ISSN: " + issn + ": " + response.getMessage()); + return response; + } else if (!response.getJournals().isEmpty()) { + // return this response, if it is not empty + return response; } - // Store this response, even if it has an error (useful for UI reporting) - responses.add(response); } - if (responses.isEmpty()) { - responses.add(new SHERPAResponse("SHERPA ROMeO lookup failed")); - } - return responses; + return new SHERPAResponse(); } } diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAEmbargo.java b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAEmbargo.java new file mode 100644 index 0000000000..c6a0bb7942 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAEmbargo.java @@ -0,0 +1,45 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.sherpa.v2; + +import java.io.Serializable; + +/** + * Model 
class for the Embargo of SHERPAv2 API (JSON) + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class SHERPAEmbargo implements Serializable { + + private static final long serialVersionUID = 6140668058547523656L; + + private int amount; + private String units; + + public SHERPAEmbargo(int amount, String units) { + this.amount = amount; + this.units = units; + } + + public int getAmount() { + return amount; + } + + public void setAmount(int amount) { + this.amount = amount; + } + + public String getUnits() { + return units; + } + + public void setUnits(String units) { + this.units = units; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAJournal.java b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAJournal.java index b668dbd927..8728eb1a79 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAJournal.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAJournal.java @@ -7,6 +7,7 @@ */ package org.dspace.app.sherpa.v2; +import java.io.Serializable; import java.util.List; /** @@ -21,7 +22,7 @@ import java.util.List; * * @author Kim Shepherd */ -public class SHERPAJournal { +public class SHERPAJournal implements Serializable { private List titles; private String url; diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPermittedVersion.java b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPermittedVersion.java index 3a810c8e9e..85d5f8960a 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPermittedVersion.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPermittedVersion.java @@ -7,6 +7,7 @@ */ package org.dspace.app.sherpa.v2; +import java.io.Serializable; import java.util.List; /** @@ -28,7 +29,9 @@ import java.util.List; * * @see SHERPAPublisherPolicy */ -public class SHERPAPermittedVersion { +public class SHERPAPermittedVersion implements Serializable { + + private static final long 
serialVersionUID = 4992181606327727442L; // Version (submitted, accepted, published) private String articleVersion; @@ -47,11 +50,6 @@ public class SHERPAPermittedVersion { // Embargo private SHERPAEmbargo embargo; - protected static class SHERPAEmbargo { - String units; - int amount; - } - public String getArticleVersion() { return articleVersion; } diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisher.java b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisher.java index 0097ec2fb3..ee1491ed8b 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisher.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisher.java @@ -7,6 +7,8 @@ */ package org.dspace.app.sherpa.v2; +import java.io.Serializable; + /** * Plain java representation of a SHERPA Publisher object, based on SHERPA API v2 responses. * @@ -18,7 +20,7 @@ package org.dspace.app.sherpa.v2; * @see SHERPAJournal * @see SHERPAPublisherResponse */ -public class SHERPAPublisher { +public class SHERPAPublisher implements Serializable { private String name = null; private String relationshipType; private String country; diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisherPolicy.java b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisherPolicy.java index 2a04564e28..3e76c5cd37 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisherPolicy.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisherPolicy.java @@ -7,6 +7,7 @@ */ package org.dspace.app.sherpa.v2; +import java.io.Serializable; import java.util.List; import java.util.Map; @@ -22,7 +23,7 @@ import java.util.Map; * @see SHERPAJournal * @see SHERPAPermittedVersion */ -public class SHERPAPublisherPolicy { +public class SHERPAPublisherPolicy implements Serializable { private int id; private boolean openAccessPermitted; diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAResponse.java 
b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAResponse.java index a40814bafe..83dd1e0d3c 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAResponse.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAResponse.java @@ -10,12 +10,15 @@ package org.dspace.app.sherpa.v2; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import java.io.Serializable; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.Date; import java.util.List; import java.util.Map; import java.util.TreeMap; +import com.fasterxml.jackson.annotation.JsonIgnore; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.json.JSONArray; @@ -33,7 +36,10 @@ import org.json.JSONTokener; * @author Kim Shepherd * */ -public class SHERPAResponse { +public class SHERPAResponse implements Serializable { + + private static final long serialVersionUID = 2732963970169240597L; + // Is this response to be treated as an error? 
private boolean error; @@ -52,6 +58,9 @@ public class SHERPAResponse { // SHERPA URI (the human page version of this API response) private String uri; + @JsonIgnore + private Date retrievalTime = new Date(); + // Format enum - currently only JSON is supported public enum SHERPAFormat { JSON, XML @@ -71,6 +80,11 @@ public class SHERPAResponse { } } + /** + * Create an empty SHERPAResponse representation + */ + public SHERPAResponse() {} + /** * Parse the SHERPA v2 API JSON and construct Romeo policy data for display * This method does not return a value, but rather populates the metadata and journals objects @@ -479,6 +493,12 @@ public class SHERPAResponse { } permittedVersion.setLicenses(sherpaLicenses); + if (permitted.has("embargo")) { + JSONObject embargo = permitted.getJSONObject("embargo"); + SHERPAEmbargo SHERPAEmbargo = new SHERPAEmbargo(embargo.getInt("amount"), embargo.getString("units")); + permittedVersion.setEmbargo(SHERPAEmbargo); + } + return permittedVersion; } @@ -542,4 +562,8 @@ public class SHERPAResponse { public SHERPASystemMetadata getMetadata() { return metadata; } + + public Date getRetrievalTime() { + return retrievalTime; + } } diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPASystemMetadata.java b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPASystemMetadata.java index 2a807940bb..65b07c1811 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPASystemMetadata.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPASystemMetadata.java @@ -7,6 +7,8 @@ */ package org.dspace.app.sherpa.v2; +import java.io.Serializable; + /** * Plain java representation of a SHERPA System Metadata object, based on SHERPA API v2 responses. 
* @@ -18,7 +20,7 @@ package org.dspace.app.sherpa.v2; * * @author Kim Shepherd */ -public class SHERPASystemMetadata { +public class SHERPASystemMetadata implements Serializable { private int id; private String uri; diff --git a/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCli.java b/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCli.java new file mode 100644 index 0000000000..f901c9ca56 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCli.java @@ -0,0 +1,175 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.solrdatabaseresync; + +import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD; +import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD_PREDB; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.Calendar; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; + +import org.apache.commons.cli.ParseException; +import org.apache.logging.log4j.Logger; +import org.apache.solr.client.solrj.SolrQuery; +import org.apache.solr.client.solrj.SolrServerException; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.SolrDocument; +import org.dspace.core.Context; +import org.dspace.discovery.IndexableObject; +import org.dspace.discovery.IndexingService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.discovery.SearchUtils; +import org.dspace.discovery.SolrSearchCore; +import org.dspace.discovery.indexobject.IndexableItem; +import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.services.ConfigurationService; +import 
org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.util.SolrUtils; +import org.dspace.utils.DSpace; + +/** + * {@link DSpaceRunnable} implementation to update solr items with "predb" status to either: + * - Delete them from solr if they're not present in the database + * - Remove their status if they're present in the database + */ +public class SolrDatabaseResyncCli extends DSpaceRunnable { + /* Log4j logger */ + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SolrDatabaseResyncCli.class); + + public static final String TIME_UNTIL_REINDEX_PROPERTY = "solr-database-resync.time-until-reindex"; + + private IndexingService indexingService; + private SolrSearchCore solrSearchCore; + private IndexObjectFactoryFactory indexObjectServiceFactory; + private ConfigurationService configurationService; + + private int timeUntilReindex = 0; + private String maxTime; + + @Override + public SolrDatabaseResyncCliScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager() + .getServiceByName("solr-database-resync", SolrDatabaseResyncCliScriptConfiguration.class); + } + + public static void runScheduled() throws Exception { + SolrDatabaseResyncCli script = new SolrDatabaseResyncCli(); + script.setup(); + script.internalRun(); + } + + @Override + public void setup() throws ParseException { + indexingService = DSpaceServicesFactory.getInstance().getServiceManager() + .getServiceByName(IndexingService.class.getName(), IndexingService.class); + solrSearchCore = DSpaceServicesFactory.getInstance().getServiceManager() + .getServicesByType(SolrSearchCore.class).get(0); + indexObjectServiceFactory = IndexObjectFactoryFactory.getInstance(); + configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + } + + @Override + public void internalRun() throws Exception { + logInfoAndOut("Starting Item resync of Solr and Database..."); + + timeUntilReindex = getTimeUntilReindex(); + maxTime = 
getMaxTime(); + + Context context = new Context(); + + try { + context.turnOffAuthorisationSystem(); + performStatusUpdate(context); + } finally { + context.restoreAuthSystemState(); + context.complete(); + } + } + + private void performStatusUpdate(Context context) throws SearchServiceException, SolrServerException, IOException { + SolrQuery solrQuery = new SolrQuery(); + solrQuery.setQuery(STATUS_FIELD + ":" + STATUS_FIELD_PREDB); + solrQuery.addFilterQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":" + IndexableItem.TYPE); + String dateRangeFilter = SearchUtils.LAST_INDEXED_FIELD + ":[* TO " + maxTime + "]"; + logDebugAndOut("Date range filter used; " + dateRangeFilter); + solrQuery.addFilterQuery(dateRangeFilter); + solrQuery.addField(SearchUtils.RESOURCE_ID_FIELD); + solrQuery.addField(SearchUtils.RESOURCE_UNIQUE_ID); + QueryResponse response = solrSearchCore.getSolr().query(solrQuery, solrSearchCore.REQUEST_METHOD); + + if (response != null) { + logInfoAndOut(response.getResults().size() + " items found to process"); + + for (SolrDocument doc : response.getResults()) { + String uuid = (String) doc.getFirstValue(SearchUtils.RESOURCE_ID_FIELD); + String uniqueId = (String) doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID); + logDebugAndOut("Processing item with UUID: " + uuid); + + Optional indexableObject = Optional.empty(); + try { + indexableObject = indexObjectServiceFactory + .getIndexableObjectFactory(uniqueId).findIndexableObject(context, uuid); + } catch (SQLException e) { + log.warn("An exception occurred when attempting to retrieve item with UUID \"" + uuid + + "\" from the database, removing related solr document", e); + } + + try { + if (indexableObject.isPresent()) { + logDebugAndOut("Item exists in DB, updating solr document"); + updateItem(context, indexableObject.get()); + } else { + logDebugAndOut("Item doesn't exist in DB, removing solr document"); + removeItem(context, uniqueId); + } + } catch (SQLException | IOException e) { + 
log.error(e.getMessage(), e); + } + } + } + + indexingService.commit(); + } + + private void updateItem(Context context, IndexableObject indexableObject) throws SolrServerException, IOException { + Map fieldModifier = new HashMap<>(1); + fieldModifier.put("remove", STATUS_FIELD_PREDB); + indexingService.atomicUpdate(context, indexableObject.getUniqueIndexID(), STATUS_FIELD, fieldModifier); + } + + private void removeItem(Context context, String uniqueId) throws IOException, SQLException { + indexingService.unIndexContent(context, uniqueId); + } + + private String getMaxTime() { + Calendar cal = Calendar.getInstance(); + if (timeUntilReindex > 0) { + cal.add(Calendar.MILLISECOND, -timeUntilReindex); + } + return SolrUtils.getDateFormatter().format(cal.getTime()); + } + + private int getTimeUntilReindex() { + return configurationService.getIntProperty(TIME_UNTIL_REINDEX_PROPERTY, 0); + } + + private void logInfoAndOut(String message) { + log.info(message); + System.out.println(message); + } + + private void logDebugAndOut(String message) { + log.debug(message); + System.out.println(message); + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java new file mode 100644 index 0000000000..b238ccf061 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.solrdatabaseresync; + +import org.apache.commons.cli.Options; +import org.dspace.core.Context; +import org.dspace.scripts.configuration.ScriptConfiguration; + +/** + * The {@link ScriptConfiguration} for the {@link 
SolrDatabaseResyncCli} script. + */ +public class SolrDatabaseResyncCliScriptConfiguration extends ScriptConfiguration { + private Class dspaceRunnableClass; + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public boolean isAllowedToExecute(Context context) { + return true; + } + + @Override + public Options getOptions() { + if (options == null) { + options = new Options(); + } + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/statistics/LogAnalyser.java b/dspace-api/src/main/java/org/dspace/app/statistics/LogAnalyser.java index 264fb1b317..2e4ed69b26 100644 --- a/dspace-api/src/main/java/org/dspace/app/statistics/LogAnalyser.java +++ b/dspace-api/src/main/java/org/dspace/app/statistics/LogAnalyser.java @@ -29,6 +29,10 @@ import java.util.TimeZone; import java.util.regex.Matcher; import java.util.regex.Pattern; +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.DefaultParser; +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; import org.apache.commons.lang3.StringUtils; import org.dspace.core.Context; import org.dspace.core.LogHelper; @@ -44,6 +48,7 @@ import org.dspace.services.factory.DSpaceServicesFactory; * files. Most input can be configured; use the -help flag for a full list * of usage information. * + *

* The output of this file is plain text and forms an "aggregation" file which * can then be used for display purposes using the related ReportGenerator * class. @@ -167,7 +172,7 @@ public class LogAnalyser { /** * the average number of views per item */ - private static int views = 0; + private static long views = 0; /////////////////////// // regular expressions @@ -236,12 +241,12 @@ public class LogAnalyser { /** * pattern to match commented out lines from the config file */ - private static final Pattern comment = Pattern.compile("^#"); + private static final Pattern COMMENT = Pattern.compile("^#"); /** * pattern to match genuine lines from the config file */ - private static final Pattern real = Pattern.compile("^(.+)=(.+)"); + private static final Pattern REAL = Pattern.compile("^(.+)=(.+)"); /** * pattern to match all search types @@ -337,44 +342,73 @@ public class LogAnalyser { Date myEndDate = null; boolean myLookUp = false; - // read in our command line options - for (int i = 0; i < argv.length; i++) { - if (argv[i].equals("-log")) { - myLogDir = argv[i + 1]; - } + // Define command line options. 
+ Options options = new Options(); + Option option; - if (argv[i].equals("-file")) { - myFileTemplate = argv[i + 1]; - } + option = Option.builder().longOpt("log").hasArg().build(); + options.addOption(option); - if (argv[i].equals("-cfg")) { - myConfigFile = argv[i + 1]; - } + option = Option.builder().longOpt("file").hasArg().build(); + options.addOption(option); - if (argv[i].equals("-out")) { - myOutFile = argv[i + 1]; - } + option = Option.builder().longOpt("cfg").hasArg().build(); + options.addOption(option); - if (argv[i].equals("-help")) { - LogAnalyser.usage(); - System.exit(0); - } + option = Option.builder().longOpt("out").hasArg().build(); + options.addOption(option); - if (argv[i].equals("-start")) { - myStartDate = parseDate(argv[i + 1]); - } + option = Option.builder().longOpt("help").build(); + options.addOption(option); - if (argv[i].equals("-end")) { - myEndDate = parseDate(argv[i + 1]); - } + option = Option.builder().longOpt("start").hasArg().build(); + options.addOption(option); - if (argv[i].equals("-lookup")) { - myLookUp = true; - } + option = Option.builder().longOpt("end").hasArg().build(); + options.addOption(option); + + option = Option.builder().longOpt("lookup").build(); + options.addOption(option); + + // Parse the command. + DefaultParser cmdParser = new DefaultParser(); + CommandLine cmd = cmdParser.parse(options, argv); + + // Analyze the command. 
+ if (cmd.hasOption("help")) { + LogAnalyser.usage(); + System.exit(0); } + if (cmd.hasOption("log")) { + myLogDir = cmd.getOptionValue("log"); + } + + if (cmd.hasOption("file")) { + myFileTemplate = cmd.getOptionValue("file"); + } + + if (cmd.hasOption("cfg")) { + myConfigFile = cmd.getOptionValue("cfg"); + } + + if (cmd.hasOption("out")) { + myOutFile = cmd.getOptionValue("out"); + } + + if (cmd.hasOption("start")) { + myStartDate = parseDate(cmd.getOptionValue("start")); + } + + if (cmd.hasOption("end")) { + myEndDate = parseDate(cmd.getOptionValue("end")); + } + + myLookUp = cmd.hasOption("lookup"); + // now call the method which actually processes the logs - processLogs(context, myLogDir, myFileTemplate, myConfigFile, myOutFile, myStartDate, myEndDate, myLookUp); + processLogs(context, myLogDir, myFileTemplate, myConfigFile, myOutFile, + myStartDate, myEndDate, myLookUp); } /** @@ -406,18 +440,18 @@ public class LogAnalyser { startTime = new GregorianCalendar(); //instantiate aggregators - actionAggregator = new HashMap(); - searchAggregator = new HashMap(); - userAggregator = new HashMap(); - itemAggregator = new HashMap(); - archiveStats = new HashMap(); + actionAggregator = new HashMap<>(); + searchAggregator = new HashMap<>(); + userAggregator = new HashMap<>(); + itemAggregator = new HashMap<>(); + archiveStats = new HashMap<>(); //instantiate lists - generalSummary = new ArrayList(); - excludeWords = new ArrayList(); - excludeTypes = new ArrayList(); - excludeChars = new ArrayList(); - itemTypes = new ArrayList(); + generalSummary = new ArrayList<>(); + excludeWords = new ArrayList<>(); + excludeTypes = new ArrayList<>(); + excludeChars = new ArrayList<>(); + itemTypes = new ArrayList<>(); // set the parameters for this analysis setParameters(myLogDir, myFileTemplate, myConfigFile, myOutFile, myStartDate, myEndDate, myLookUp); @@ -529,10 +563,11 @@ public class LogAnalyser { // for each search word add to the aggregator or // increment the aggregator's 
counter - for (int j = 0; j < words.length; j++) { + for (String word : words) { // FIXME: perhaps aggregators ought to be objects // themselves - searchAggregator.put(words[j], increment(searchAggregator, words[j])); + searchAggregator.put(word, + increment(searchAggregator, word)); } } @@ -591,13 +626,13 @@ public class LogAnalyser { } // do the average views analysis - if ((archiveStats.get("All Items")).intValue() != 0) { + if ((archiveStats.get("All Items")) != 0) { // FIXME: this is dependent on their being a query on the db, which // there might not always be if it becomes configurable - Double avg = Math.ceil( + double avg = Math.ceil( (actionAggregator.get("view_item")).doubleValue() / (archiveStats.get("All Items")).doubleValue()); - views = avg.intValue(); + views = Math.round(avg); } // finally, write the output @@ -672,55 +707,55 @@ public class LogAnalyser { Iterator keys = null; // output the number of lines parsed - summary.append("log_lines=" + Integer.toString(lineCount) + "\n"); + summary.append("log_lines=").append(Integer.toString(lineCount)).append("\n"); // output the number of warnings encountered - summary.append("warnings=" + Integer.toString(warnCount) + "\n"); - summary.append("exceptions=" + Integer.toString(excCount) + "\n"); + summary.append("warnings=").append(Integer.toString(warnCount)).append("\n"); + summary.append("exceptions=").append(Integer.toString(excCount)).append("\n"); // set the general summary config up in the aggregator file for (int i = 0; i < generalSummary.size(); i++) { - summary.append("general_summary=" + generalSummary.get(i) + "\n"); + summary.append("general_summary=").append(generalSummary.get(i)).append("\n"); } // output the host name - summary.append("server_name=" + hostName + "\n"); + summary.append("server_name=").append(hostName).append("\n"); // output the service name - summary.append("service_name=" + name + "\n"); + summary.append("service_name=").append(name).append("\n"); // output the date 
information if necessary SimpleDateFormat sdf = new SimpleDateFormat("dd'/'MM'/'yyyy"); if (startDate != null) { - summary.append("start_date=" + sdf.format(startDate) + "\n"); + summary.append("start_date=").append(sdf.format(startDate)).append("\n"); } else if (logStartDate != null) { - summary.append("start_date=" + sdf.format(logStartDate) + "\n"); + summary.append("start_date=").append(sdf.format(logStartDate)).append("\n"); } if (endDate != null) { - summary.append("end_date=" + sdf.format(endDate) + "\n"); + summary.append("end_date=").append(sdf.format(endDate)).append("\n"); } else if (logEndDate != null) { - summary.append("end_date=" + sdf.format(logEndDate) + "\n"); + summary.append("end_date=").append(sdf.format(logEndDate)).append("\n"); } // write out the archive stats keys = archiveStats.keySet().iterator(); while (keys.hasNext()) { String key = keys.next(); - summary.append("archive." + key + "=" + archiveStats.get(key) + "\n"); + summary.append("archive.").append(key).append("=").append(archiveStats.get(key)).append("\n"); } // write out the action aggregation results keys = actionAggregator.keySet().iterator(); while (keys.hasNext()) { String key = keys.next(); - summary.append("action." 
+ key + "=" + actionAggregator.get(key) + "\n"); + summary.append("action.").append(key).append("=").append(actionAggregator.get(key)).append("\n"); } // depending on the config settings for reporting on emails output the // login information - summary.append("user_email=" + userEmail + "\n"); + summary.append("user_email=").append(userEmail).append("\n"); int address = 1; keys = userAggregator.keySet().iterator(); @@ -731,9 +766,10 @@ public class LogAnalyser { String key = keys.next(); summary.append("user."); if (userEmail.equals("on")) { - summary.append(key + "=" + userAggregator.get(key) + "\n"); + summary.append(key).append("=").append(userAggregator.get(key)).append("\n"); } else if (userEmail.equals("alias")) { - summary.append("Address " + Integer.toString(address++) + "=" + userAggregator.get(key) + "\n"); + summary.append("Address ").append(Integer.toString(address++)) + .append("=").append(userAggregator.get(key)).append("\n"); } } @@ -742,12 +778,13 @@ public class LogAnalyser { // the listing there are // output the search word information - summary.append("search_floor=" + searchFloor + "\n"); + summary.append("search_floor=").append(searchFloor).append("\n"); keys = searchAggregator.keySet().iterator(); while (keys.hasNext()) { String key = keys.next(); - if ((searchAggregator.get(key)).intValue() >= searchFloor) { - summary.append("search." + key + "=" + searchAggregator.get(key) + "\n"); + if ((searchAggregator.get(key)) >= searchFloor) { + summary.append("search.").append(key).append("=") + .append(searchAggregator.get(key)).append("\n"); } } @@ -759,35 +796,35 @@ public class LogAnalyser { // be the same thing. 
// item viewing information - summary.append("item_floor=" + itemFloor + "\n"); - summary.append("host_url=" + url + "\n"); - summary.append("item_lookup=" + itemLookup + "\n"); + summary.append("item_floor=").append(itemFloor).append("\n"); + summary.append("host_url=").append(url).append("\n"); + summary.append("item_lookup=").append(itemLookup).append("\n"); // write out the item access information keys = itemAggregator.keySet().iterator(); while (keys.hasNext()) { String key = keys.next(); - if ((itemAggregator.get(key)).intValue() >= itemFloor) { - summary.append("item." + key + "=" + itemAggregator.get(key) + "\n"); + if ((itemAggregator.get(key)) >= itemFloor) { + summary.append("item.").append(key).append("=") + .append(itemAggregator.get(key)).append("\n"); } } // output the average views per item if (views > 0) { - summary.append("avg_item_views=" + views + "\n"); + summary.append("avg_item_views=").append(views).append("\n"); } // insert the analysis processing time information Calendar endTime = new GregorianCalendar(); long timeInMillis = (endTime.getTimeInMillis() - startTime.getTimeInMillis()); - summary.append("analysis_process_time=" + Long.toString(timeInMillis / 1000) + "\n"); + summary.append("analysis_process_time=") + .append(Long.toString(timeInMillis / 1000)).append("\n"); // finally write the string into the output file - try { - BufferedWriter out = new BufferedWriter(new FileWriter(outFile)); + try (BufferedWriter out = new BufferedWriter(new FileWriter(outFile));) { out.write(summary.toString()); out.flush(); - out.close(); } catch (IOException e) { System.out.println("Unable to write to output file " + outFile); System.exit(0); @@ -891,11 +928,11 @@ public class LogAnalyser { if (i > 0) { wordRXString.append("|"); } - wordRXString.append(" " + excludeWords.get(i) + " "); + wordRXString.append(" ").append(excludeWords.get(i)).append(" "); wordRXString.append("|"); - wordRXString.append("^" + excludeWords.get(i) + " "); + 
wordRXString.append("^").append(excludeWords.get(i)).append(" "); wordRXString.append("|"); - wordRXString.append(" " + excludeWords.get(i) + "$"); + wordRXString.append(" ").append(excludeWords.get(i)).append("$"); } wordRXString.append(")"); wordRX = Pattern.compile(wordRXString.toString()); @@ -956,8 +993,8 @@ public class LogAnalyser { // read in the config file and set up our instance variables while ((record = br.readLine()) != null) { // check to see what kind of line we have - Matcher matchComment = comment.matcher(record); - Matcher matchReal = real.matcher(record); + Matcher matchComment = COMMENT.matcher(record); + Matcher matchReal = REAL.matcher(record); // if the line is not a comment and is real, read it in if (!matchComment.matches() && matchReal.matches()) { @@ -968,7 +1005,7 @@ public class LogAnalyser { // read the config values into our instance variables (see // documentation for more info on config params) if (key.equals("general.summary")) { - actionAggregator.put(value, Integer.valueOf(0)); + actionAggregator.put(value, 0); generalSummary.add(value); } @@ -1022,9 +1059,9 @@ public class LogAnalyser { Integer newValue = null; if (map.containsKey(key)) { // FIXME: this seems like a ridiculous way to add Integers - newValue = Integer.valueOf((map.get(key)).intValue() + 1); + newValue = (map.get(key)) + 1; } else { - newValue = Integer.valueOf(1); + newValue = 1; } return newValue; } diff --git a/dspace-api/src/main/java/org/dspace/app/statistics/ReportGenerator.java b/dspace-api/src/main/java/org/dspace/app/statistics/ReportGenerator.java index 25c6d8cb9c..c5fe0072f5 100644 --- a/dspace-api/src/main/java/org/dspace/app/statistics/ReportGenerator.java +++ b/dspace-api/src/main/java/org/dspace/app/statistics/ReportGenerator.java @@ -27,6 +27,10 @@ import java.util.StringTokenizer; import java.util.regex.Matcher; import java.util.regex.Pattern; +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.DefaultParser; +import 
org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; import org.dspace.content.Item; import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.MetadataValue; @@ -162,7 +166,7 @@ public class ReportGenerator { /** * pattern that matches an unqualified aggregator property */ - private static final Pattern real = Pattern.compile("^(.+)=(.+)"); + private static final Pattern REAL = Pattern.compile("^(.+)=(.+)"); ////////////////////////// // Miscellaneous variables @@ -221,28 +225,46 @@ public class ReportGenerator { String myOutput = null; String myMap = null; - // read in our command line options - for (int i = 0; i < argv.length; i++) { - if (argv[i].equals("-format")) { - myFormat = argv[i + 1].toLowerCase(); - } + Options options = new Options(); + Option option; - if (argv[i].equals("-in")) { - myInput = argv[i + 1]; - } + option = Option.builder().longOpt("format").hasArg().build(); + options.addOption(option); - if (argv[i].equals("-out")) { - myOutput = argv[i + 1]; - } + option = Option.builder().longOpt("in").hasArg().build(); + options.addOption(option); - if (argv[i].equals("-map")) { - myMap = argv[i + 1]; - } + option = Option.builder().longOpt("out").hasArg().build(); + options.addOption(option); - if (argv[i].equals("-help")) { - usage(); - System.exit(0); - } + option = Option.builder().longOpt("map").hasArg().build(); + options.addOption(option); + + option = Option.builder().longOpt("help").build(); + options.addOption(option); + + DefaultParser parser = new DefaultParser(); + CommandLine cmd = parser.parse(options, argv); + + if (cmd.hasOption("help")) { + usage(); + System.exit(0); + } + + if (cmd.hasOption("format")) { + myFormat = cmd.getOptionValue("format"); + } + + if (cmd.hasOption("in")) { + myInput = cmd.getOptionValue("in"); + } + + if (cmd.hasOption("out")) { + myOutput = cmd.getOptionValue("out"); + } + + if (cmd.hasOption("map")) { + myMap = cmd.getOptionValue("map"); } processReport(context, myFormat, 
myInput, myOutput, myMap); @@ -576,7 +598,7 @@ public class ReportGenerator { // loop through the map file and read in the values while ((record = br.readLine()) != null) { - Matcher matchReal = real.matcher(record); + Matcher matchReal = REAL.matcher(record); // if the line is real then read it in if (matchReal.matches()) { @@ -650,7 +672,7 @@ public class ReportGenerator { // loop through the aggregator file and read in the values while ((record = br.readLine()) != null) { // match real lines - Matcher matchReal = real.matcher(record); + Matcher matchReal = REAL.matcher(record); // pre-prepare our input strings String section = null; diff --git a/dspace-api/src/main/java/org/dspace/app/statistics/StatisticsLoader.java b/dspace-api/src/main/java/org/dspace/app/statistics/StatisticsLoader.java index fd72b3b805..cc8a7024f1 100644 --- a/dspace-api/src/main/java/org/dspace/app/statistics/StatisticsLoader.java +++ b/dspace-api/src/main/java/org/dspace/app/statistics/StatisticsLoader.java @@ -324,11 +324,7 @@ public class StatisticsLoader { ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); File reportDir = new File(configurationService.getProperty("log.report.dir")); - if (reportDir != null) { - return reportDir.listFiles(new AnalysisAndReportFilter()); - } - - return null; + return reportDir.listFiles(new AnalysisAndReportFilter()); } /** diff --git a/dspace-api/src/main/java/org/dspace/app/util/CacheSnooper.java b/dspace-api/src/main/java/org/dspace/app/util/CacheSnooper.java deleted file mode 100644 index 22ad518ea3..0000000000 --- a/dspace-api/src/main/java/org/dspace/app/util/CacheSnooper.java +++ /dev/null @@ -1,58 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ - -package org.dspace.app.util; - -import net.sf.ehcache.Cache; 
-import net.sf.ehcache.CacheManager; -import org.dspace.core.Context; -import org.dspace.servicemanager.DSpaceKernelImpl; -import org.dspace.servicemanager.DSpaceKernelInit; -import org.dspace.services.CachingService; - -/** - * List all EhCache CacheManager and Cache instances. - * - *

This is a debugging tool, not used in the daily operation of DSpace. - * Just run it from the installed instance using - * {@code bin/dspace dsrun org.dspace.app.util.CacheSnooper} - * to check that the cache configuration is what you expect it to be, - * given your configuration. - * - *

This was created to prove a specific cache configuration patch, - * but I leave it here in the hope that it may be useful to others. - * - * @author Mark H. Wood - */ -public class CacheSnooper { - private CacheSnooper() { } - - public static void main(String[] argv) { - // Ensure that the DSpace kernel is started. - DSpaceKernelImpl kernel = DSpaceKernelInit.getKernel(null); - - // Ensure that the services cache manager is started. - CachingService serviceCaches = kernel.getServiceManager() - .getServiceByName(null, CachingService.class); - - // Ensure that the database layer is started. - Context ctx = new Context(); - - // List those caches! - for (CacheManager manager : CacheManager.ALL_CACHE_MANAGERS) { - System.out.format("CacheManager: %s%n", manager); - for (String cacheName : manager.getCacheNames()) { - Cache cache = manager.getCache(cacheName); - System.out.format(" Cache: '%s'; maxHeap: %d; maxDisk: %d%n", - cacheName, - cache.getCacheConfiguration().getMaxEntriesLocalHeap(), - cache.getCacheConfiguration().getMaxEntriesLocalDisk()); - } - } - } -} diff --git a/dspace-api/src/main/java/org/dspace/app/util/Configuration.java b/dspace-api/src/main/java/org/dspace/app/util/Configuration.java index e9b125c41c..e4a59eeb4d 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/Configuration.java +++ b/dspace-api/src/main/java/org/dspace/app/util/Configuration.java @@ -37,6 +37,7 @@ public class Configuration { *

  • {@code --property name} prints the value of the DSpace configuration * property {@code name} to the standard output.
  • *
  • {@code --raw} suppresses parameter substitution in the output.
  • + *
  • {@code --first} print only the first of multiple values.
  • *
  • {@code --help} describes these options.
  • * * If the property does not exist, nothing is written. @@ -51,6 +52,8 @@ public class Configuration { "optional name of the module in which 'property' exists"); options.addOption("r", "raw", false, "do not do property substitution on the value"); + options.addOption("f", "first", false, + "display only the first value of an array property"); options.addOption("?", "Get help"); options.addOption("h", "help", false, "Get help"); @@ -90,19 +93,36 @@ public class Configuration { propNameBuilder.append(cmd.getOptionValue('p')); String propName = propNameBuilder.toString(); - // Print the property's value, if it exists + // Print the property's value(s), if it exists ConfigurationService cfg = DSpaceServicesFactory.getInstance().getConfigurationService(); if (!cfg.hasProperty(propName)) { System.out.println(); } else { - String val; if (cmd.hasOption('r')) { - val = cfg.getPropertyValue(propName).toString(); + // Print "raw" values (without property substitutions) + Object rawValue = cfg.getPropertyValue(propName); + if (rawValue.getClass().isArray()) { + for (Object value : (Object[]) rawValue) { + System.out.println(value.toString()); + if (cmd.hasOption('f')) { + break; // If --first print only one value + } + } + } else { // Not an array + System.out.println(rawValue.toString()); + } } else { - val = cfg.getProperty(propName); + // Print values with property substitutions + String[] values = cfg.getArrayProperty(propName); + for (String value : values) { + System.out.println(value); + if (cmd.hasOption('f')) { + break; // If --first print only one value + } + } } - System.out.println(val); } + System.exit(0); } } diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInput.java b/dspace-api/src/main/java/org/dspace/app/util/DCInput.java index 32fd5d634d..11f9aadd86 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInput.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInput.java @@ -10,6 +10,7 @@ package org.dspace.app.util; import 
java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; import javax.annotation.Nullable; @@ -131,10 +132,15 @@ public class DCInput { private boolean closedVocabulary = false; /** - * the regex to comply with, null if nothing + * the regex in ECMAScript standard format, usable also by rests. */ private String regex = null; + /** + * the computed pattern, null if nothing + */ + private Pattern pattern = null; + /** * allowed document types */ @@ -144,8 +150,8 @@ public class DCInput { private boolean isMetadataField = false; private String relationshipType = null; private String searchConfiguration = null; - private String filter; - private List externalSources; + private final String filter; + private final List externalSources; /** * The scope of the input sets, this restricts hidden metadata fields from @@ -178,7 +184,7 @@ public class DCInput { //check if the input have a language tag language = Boolean.valueOf(fieldMap.get("language")); - valueLanguageList = new ArrayList(); + valueLanguageList = new ArrayList<>(); if (language) { String languageNameTmp = fieldMap.get("value-pairs-name"); if (StringUtils.isBlank(languageNameTmp)) { @@ -191,7 +197,7 @@ public class DCInput { repeatable = "true".equalsIgnoreCase(repStr) || "yes".equalsIgnoreCase(repStr); String nameVariantsString = fieldMap.get("name-variants"); - nameVariants = (StringUtils.isNotBlank(nameVariantsString)) ? + nameVariants = StringUtils.isNotBlank(nameVariantsString) ? 
nameVariantsString.equalsIgnoreCase("true") : false; label = fieldMap.get("label"); inputType = fieldMap.get("input-type"); @@ -203,11 +209,11 @@ public class DCInput { } hint = fieldMap.get("hint"); warning = fieldMap.get("required"); - required = (warning != null && warning.length() > 0); + required = warning != null && warning.length() > 0; visibility = fieldMap.get("visibility"); readOnly = fieldMap.get("readonly"); vocabulary = fieldMap.get("vocabulary"); - regex = fieldMap.get("regex"); + this.initRegex(fieldMap.get("regex")); String closedVocabularyStr = fieldMap.get("closedVocabulary"); closedVocabulary = "true".equalsIgnoreCase(closedVocabularyStr) || "yes".equalsIgnoreCase(closedVocabularyStr); @@ -238,6 +244,22 @@ public class DCInput { } + protected void initRegex(String regex) { + this.regex = null; + this.pattern = null; + if (regex != null) { + try { + Optional.ofNullable(RegexPatternUtils.computePattern(regex)) + .ifPresent(pattern -> { + this.pattern = pattern; + this.regex = regex; + }); + } catch (PatternSyntaxException e) { + log.warn("The regex field of input {} with value {} is invalid!", this.label, regex); + } + } + } + /** * Is this DCInput for display in the given scope? The scope should be * either "workflow" or "submit", as per the input forms definition. 
If the @@ -248,7 +270,7 @@ public class DCInput { * @return whether the input should be displayed or not */ public boolean isVisible(String scope) { - return (visibility == null || visibility.equals(scope)); + return visibility == null || visibility.equals(scope); } /** @@ -381,7 +403,7 @@ public class DCInput { /** * Get the style for this form field - * + * * @return the style */ public String getStyle() { @@ -512,8 +534,12 @@ public class DCInput { return visibility; } + public Pattern getPattern() { + return this.pattern; + } + public String getRegex() { - return regex; + return this.regex; } public String getFieldName() { @@ -546,34 +572,45 @@ public class DCInput { public boolean validate(String value) { if (StringUtils.isNotBlank(value)) { try { - if (StringUtils.isNotBlank(regex)) { - Pattern pattern = Pattern.compile(regex); + if (this.pattern != null) { if (!pattern.matcher(value).matches()) { return false; } } } catch (PatternSyntaxException ex) { - log.error("Regex validation failed!", ex.getMessage()); + log.error("Regex validation failed! {}", ex.getMessage()); } } - return true; } /** - * Verify whether the current field contains an entity relationship - * This also implies a relationship type is defined for this field - * The field can contain both an entity relationship and a metadata field simultaneously + * Get the type bind list for use in determining whether + * to display this field in angular dynamic form building + * @return list of bound types + */ + public List getTypeBindList() { + return typeBind; + } + + /** + * Verify whether the current field contains an entity relationship. + * This also implies a relationship type is defined for this field. + * The field can contain both an entity relationship and a metadata field + * simultaneously. + * @return true if the field contains a relationship. 
*/ public boolean isRelationshipField() { return isRelationshipField; } /** - * Verify whether the current field contains a metadata field - * This also implies a field type is defined for this field - * The field can contain both an entity relationship and a metadata field simultaneously + * Verify whether the current field contains a metadata field. + * This also implies a field type is defined for this field. + * The field can contain both an entity relationship and a metadata field + * simultaneously. + * @return true if the field contains a metadata field. */ public boolean isMetadataField() { return isMetadataField; diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java b/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java index bfd4270cf2..e903c11b13 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java @@ -7,6 +7,7 @@ */ package org.dspace.app.util; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -16,7 +17,6 @@ import org.dspace.core.Utils; * Class representing all DC inputs required for a submission, organized into pages * * @author Brian S. Hughes, based on work by Jenny Toves, OCLC - * @version $Revision$ */ public class DCInputSet { @@ -33,7 +33,6 @@ public class DCInputSet { * constructor * * @param formName form name - * @param mandatoryFlags * @param rows the rows * @param listMap map */ @@ -176,4 +175,50 @@ public class DCInputSet { return true; } + /** + * Iterate DC input rows and populate a list of all allowed field names in this submission configuration. + * This is important because an input can be configured repeatedly in a form (for example it could be required + * for type Book, and allowed but not required for type Article). + * If the field is allowed for this document type it'll never be stripped from metadata on validation. 
+ * + * This can be more efficient than isFieldPresent to avoid looping the input set with each check. + * + * @param documentTypeValue Document type eg. Article, Book + * @return ArrayList of field names to use in validation + */ + public List populateAllowedFieldNames(String documentTypeValue) { + List allowedFieldNames = new ArrayList<>(); + // Before iterating each input for validation, run through all inputs + fields and populate a lookup + // map with inputs for this type. Because an input can be configured repeatedly in a form (for example + // it could be required for type Book, and allowed but not required for type Article), allowed=true will + // always take precedence + for (DCInput[] row : inputs) { + for (DCInput input : row) { + if (input.isQualdropValue()) { + List inputPairs = input.getPairs(); + //starting from the second element of the list and skipping one every time because the display + // values are also in the list and before the stored values. + for (int i = 1; i < inputPairs.size(); i += 2) { + String fullFieldname = input.getFieldName() + "." 
+ inputPairs.get(i); + if (input.isAllowedFor(documentTypeValue)) { + if (!allowedFieldNames.contains(fullFieldname)) { + allowedFieldNames.add(fullFieldname); + } + // For the purposes of qualdrop, we have to add the field name without the qualifier + // too, or a required qualdrop will get confused and incorrectly reject a value + if (!allowedFieldNames.contains(input.getFieldName())) { + allowedFieldNames.add(input.getFieldName()); + } + } + } + } else { + if (input.isAllowedFor(documentTypeValue) && !allowedFieldNames.contains(input.getFieldName())) { + allowedFieldNames.add(input.getFieldName()); + } + } + } + } + return allowedFieldNames; + } + } diff --git a/dspace-api/src/main/java/org/dspace/app/util/GoogleBitstreamComparator.java b/dspace-api/src/main/java/org/dspace/app/util/GoogleBitstreamComparator.java index add98af96f..ae6ba7e83f 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/GoogleBitstreamComparator.java +++ b/dspace-api/src/main/java/org/dspace/app/util/GoogleBitstreamComparator.java @@ -86,8 +86,10 @@ public class GoogleBitstreamComparator implements Comparator { if (priority1 > priority2) { return 1; } else if (priority1 == priority2) { - if (b1.getSizeBytes() <= b2.getSizeBytes()) { + if (b1.getSizeBytes() < b2.getSizeBytes()) { return 1; + } else if (b1.getSizeBytes() == b2.getSizeBytes()) { + return 0; } else { return -1; } diff --git a/dspace-api/src/main/java/org/dspace/app/util/GoogleMetadata.java b/dspace-api/src/main/java/org/dspace/app/util/GoogleMetadata.java index 0021f26700..c4f3f2235e 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/GoogleMetadata.java +++ b/dspace-api/src/main/java/org/dspace/app/util/GoogleMetadata.java @@ -42,7 +42,7 @@ import org.dspace.core.Context; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Element; +import org.jdom2.Element; /** * Configuration and mapping 
for Google Scholar output metadata @@ -470,11 +470,7 @@ public class GoogleMetadata { parsedOptions.add(parsedFields); } - if (null != parsedOptions) { - return parsedOptions; - } else { - return null; - } + return parsedOptions; } /** diff --git a/dspace-api/src/main/java/org/dspace/app/util/OpenSearchServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/util/OpenSearchServiceImpl.java index 97f25cb2b2..514143c93e 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/OpenSearchServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/util/OpenSearchServiceImpl.java @@ -16,10 +16,11 @@ import java.util.Arrays; import java.util.List; import java.util.Map; -import com.sun.syndication.feed.module.opensearch.OpenSearchModule; -import com.sun.syndication.feed.module.opensearch.entity.OSQuery; -import com.sun.syndication.feed.module.opensearch.impl.OpenSearchModuleImpl; -import com.sun.syndication.io.FeedException; +import com.rometools.modules.opensearch.OpenSearchModule; +import com.rometools.modules.opensearch.entity.OSQuery; +import com.rometools.modules.opensearch.impl.OpenSearchModuleImpl; +import com.rometools.rome.io.FeedException; +import org.apache.commons.lang.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.app.util.service.OpenSearchService; import org.dspace.content.DSpaceObject; @@ -29,11 +30,11 @@ import org.dspace.discovery.IndexableObject; import org.dspace.handle.service.HandleService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.Namespace; -import org.jdom.output.DOMOutputter; -import org.jdom.output.XMLOutputter; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.Namespace; +import org.jdom2.output.DOMOutputter; +import org.jdom2.output.XMLOutputter; import org.springframework.beans.factory.annotation.Autowired; import org.w3c.dom.Document; @@ -96,7 
+97,7 @@ public class OpenSearchServiceImpl implements OpenSearchService { * Get base search UI URL (websvc.opensearch.uicontext) */ protected String getBaseSearchUIURL() { - return configurationService.getProperty("dspace.server.url") + "/" + + return configurationService.getProperty("dspace.ui.url") + "/" + configurationService.getProperty("websvc.opensearch.uicontext"); } @@ -177,7 +178,9 @@ public class OpenSearchServiceImpl implements OpenSearchService { OSQuery osq = new OSQuery(); osq.setRole("request"); try { - osq.setSearchTerms(URLEncoder.encode(query, "UTF-8")); + if (StringUtils.isNotBlank(query)) { + osq.setSearchTerms(URLEncoder.encode(query, "UTF-8")); + } } catch (UnsupportedEncodingException e) { log.error(e); } @@ -192,7 +195,7 @@ public class OpenSearchServiceImpl implements OpenSearchService { * @param scope - null for the entire repository, or a collection/community handle * @return Service Document */ - protected org.jdom.Document getServiceDocument(String scope) { + protected org.jdom2.Document getServiceDocument(String scope) { ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService(); Namespace ns = Namespace.getNamespace(osNs); @@ -245,7 +248,7 @@ public class OpenSearchServiceImpl implements OpenSearchService { url.setAttribute("template", template.toString()); root.addContent(url); } - return new org.jdom.Document(root); + return new org.jdom2.Document(root); } /** @@ -255,7 +258,7 @@ public class OpenSearchServiceImpl implements OpenSearchService { * @return W3C Document object * @throws IOException if IO error */ - protected Document jDomToW3(org.jdom.Document jdomDoc) throws IOException { + protected Document jDomToW3(org.jdom2.Document jdomDoc) throws IOException { DOMOutputter domOut = new DOMOutputter(); try { return domOut.output(jdomDoc); diff --git a/dspace-api/src/main/java/org/dspace/app/util/OptimizeSelectCollection.java 
b/dspace-api/src/main/java/org/dspace/app/util/OptimizeSelectCollection.java index 1e018ff889..5dd286726d 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/OptimizeSelectCollection.java +++ b/dspace-api/src/main/java/org/dspace/app/util/OptimizeSelectCollection.java @@ -11,7 +11,6 @@ import java.sql.SQLException; import java.util.ArrayList; import java.util.List; -import org.apache.logging.log4j.Logger; import org.dspace.content.Collection; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; @@ -23,12 +22,12 @@ import org.dspace.eperson.service.EPersonService; import org.springframework.util.StopWatch; /** + * A command line tool to verify/test the accuracy and speed gains of + * {@link Collection.findAuthorizedOptimized}. + * Invocation: {@code dsrun org.dspace.app.util.OptimizeSelectCollection} * @author peterdietz - * A command line tool to verify/test the accuracy and speed gains of Collection.findAuthorizedOptimized() - * Invocation: dsrun org.dspace.app.util.OptimizeSelectCollection */ public class OptimizeSelectCollection { - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(OptimizeSelectCollection.class); private static Context context; private static ArrayList brokenPeople; @@ -49,7 +48,7 @@ public class OptimizeSelectCollection { "values as the legacy select-collection logic."); context = new Context(); - brokenPeople = new ArrayList(); + brokenPeople = new ArrayList<>(); int peopleChecked = 0; timeSavedMS = 0L; @@ -68,7 +67,7 @@ public class OptimizeSelectCollection { } } - if (brokenPeople.size() > 0) { + if (!brokenPeople.isEmpty()) { System.out.println("NOT DONE YET!!! 
Some people don't have all their collections."); for (EPerson person : brokenPeople) { System.out.println("-- " + person.getEmail()); @@ -90,7 +89,7 @@ public class OptimizeSelectCollection { stopWatch.start("findAuthorized"); List collections = collectionService.findAuthorized(context, null, Constants.ADD); stopWatch.stop(); - Long defaultMS = stopWatch.getLastTaskTimeMillis(); + long defaultMS = stopWatch.getLastTaskTimeMillis(); stopWatch.start("ListingCollections"); System.out.println("Legacy Find Authorized"); @@ -100,7 +99,7 @@ public class OptimizeSelectCollection { stopWatch.start("findAuthorizedOptimized"); List collectionsOptimized = collectionService.findAuthorizedOptimized(context, Constants.ADD); stopWatch.stop(); - Long optimizedMS = stopWatch.getLastTaskTimeMillis(); + long optimizedMS = stopWatch.getLastTaskTimeMillis(); timeSavedMS += defaultMS - optimizedMS; diff --git a/dspace-api/src/main/java/org/dspace/app/util/RegexPatternUtils.java b/dspace-api/src/main/java/org/dspace/app/util/RegexPatternUtils.java new file mode 100644 index 0000000000..578e57fb09 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/util/RegexPatternUtils.java @@ -0,0 +1,73 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.util; + +import static java.util.regex.Pattern.CASE_INSENSITIVE; + +import java.util.Optional; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; + +import org.apache.commons.lang3.StringUtils; + +/** + * Utility class useful for check regex and patterns. 
+ * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + * + */ +public class RegexPatternUtils { + + // checks input having the format /{pattern}/{flags} + // allowed flags are: g,i,m,s,u,y + public static final String REGEX_INPUT_VALIDATOR = "(/?)(.+)\\1([gimsuy]*)"; + // flags usable inside regex definition using format (?i|m|s|u|y) + public static final String REGEX_FLAGS = "(?%s)"; + public static final Pattern PATTERN_REGEX_INPUT_VALIDATOR = + Pattern.compile(REGEX_INPUT_VALIDATOR, CASE_INSENSITIVE); + + /** + * Computes a pattern starting from a regex definition with flags that + * uses the standard format: /{regex}/{flags} (ECMAScript format). + * This method can transform an ECMAScript regex into a java {@code Pattern} object + * wich can be used to validate strings. + *
    + * If regex is null, empty or blank a null {@code Pattern} will be retrieved + * If it's a valid regex, then a non-null {@code Pattern} will be retrieved, + * an exception will be thrown otherwise. + * + * @param regex with format /{regex}/{flags} + * @return {@code Pattern} regex pattern instance + * @throws PatternSyntaxException + */ + public static final Pattern computePattern(String regex) throws PatternSyntaxException { + if (StringUtils.isBlank(regex)) { + return null; + } + Matcher inputMatcher = PATTERN_REGEX_INPUT_VALIDATOR.matcher(regex); + String regexPattern = regex; + String regexFlags = ""; + if (inputMatcher.matches()) { + regexPattern = + Optional.of(inputMatcher.group(2)) + .filter(StringUtils::isNotBlank) + .orElse(regex); + regexFlags = + Optional.ofNullable(inputMatcher.group(3)) + .filter(StringUtils::isNotBlank) + .map(flags -> String.format(REGEX_FLAGS, flags)) + .orElse("") + .replaceAll("g", ""); + } + return Pattern.compile(regexFlags + regexPattern); + } + + private RegexPatternUtils() {} + +} diff --git a/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java b/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java index 2576df0193..8f155b6330 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java @@ -15,26 +15,26 @@ import java.util.List; import java.util.Map; import javax.servlet.http.HttpServletRequest; -import com.sun.syndication.feed.module.DCModule; -import com.sun.syndication.feed.module.DCModuleImpl; -import com.sun.syndication.feed.module.Module; -import com.sun.syndication.feed.module.itunes.EntryInformation; -import com.sun.syndication.feed.module.itunes.EntryInformationImpl; -import com.sun.syndication.feed.module.itunes.types.Duration; -import com.sun.syndication.feed.synd.SyndContent; -import com.sun.syndication.feed.synd.SyndContentImpl; -import com.sun.syndication.feed.synd.SyndEnclosure; -import 
com.sun.syndication.feed.synd.SyndEnclosureImpl; -import com.sun.syndication.feed.synd.SyndEntry; -import com.sun.syndication.feed.synd.SyndEntryImpl; -import com.sun.syndication.feed.synd.SyndFeed; -import com.sun.syndication.feed.synd.SyndFeedImpl; -import com.sun.syndication.feed.synd.SyndImage; -import com.sun.syndication.feed.synd.SyndImageImpl; -import com.sun.syndication.feed.synd.SyndPerson; -import com.sun.syndication.feed.synd.SyndPersonImpl; -import com.sun.syndication.io.FeedException; -import com.sun.syndication.io.SyndFeedOutput; +import com.rometools.modules.itunes.EntryInformation; +import com.rometools.modules.itunes.EntryInformationImpl; +import com.rometools.modules.itunes.types.Duration; +import com.rometools.rome.feed.module.DCModule; +import com.rometools.rome.feed.module.DCModuleImpl; +import com.rometools.rome.feed.module.Module; +import com.rometools.rome.feed.synd.SyndContent; +import com.rometools.rome.feed.synd.SyndContentImpl; +import com.rometools.rome.feed.synd.SyndEnclosure; +import com.rometools.rome.feed.synd.SyndEnclosureImpl; +import com.rometools.rome.feed.synd.SyndEntry; +import com.rometools.rome.feed.synd.SyndEntryImpl; +import com.rometools.rome.feed.synd.SyndFeed; +import com.rometools.rome.feed.synd.SyndFeedImpl; +import com.rometools.rome.feed.synd.SyndImage; +import com.rometools.rome.feed.synd.SyndImageImpl; +import com.rometools.rome.feed.synd.SyndPerson; +import com.rometools.rome.feed.synd.SyndPersonImpl; +import com.rometools.rome.io.FeedException; +import com.rometools.rome.io.SyndFeedOutput; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; @@ -193,13 +193,11 @@ public class SyndicationFeed { String defaultTitle = null; boolean podcastFeed = false; this.request = request; - // dso is null for the whole site, or a search without scope if (dso == null) { defaultTitle = configurationService.getProperty("dspace.name"); 
feed.setDescription(localize(labels, MSG_FEED_DESCRIPTION)); objectURL = resolveURL(request, null); - logoURL = configurationService.getProperty("webui.feed.logo.url"); } else { Bitstream logo = null; if (dso instanceof IndexableCollection) { @@ -329,7 +327,8 @@ public class SyndicationFeed { dcDescriptionField != null) { DCModule dc = new DCModuleImpl(); if (dcCreatorField != null) { - List dcAuthors = itemService.getMetadataByMetadataString(item, dcCreatorField); + List dcAuthors = itemService + .getMetadataByMetadataString(item, dcCreatorField); if (dcAuthors.size() > 0) { List creators = new ArrayList<>(); for (MetadataValue author : dcAuthors) { @@ -345,7 +344,8 @@ public class SyndicationFeed { } } if (dcDescriptionField != null) { - List v = itemService.getMetadataByMetadataString(item, dcDescriptionField); + List v = itemService + .getMetadataByMetadataString(item, dcDescriptionField); if (v.size() > 0) { StringBuilder descs = new StringBuilder(); for (MetadataValue d : v) { @@ -376,6 +376,7 @@ public class SyndicationFeed { enc.setLength(bit.getSizeBytes()); enc.setUrl(urlOfBitstream(request, bit)); enclosures.add(enc); + } } } @@ -419,7 +420,7 @@ public class SyndicationFeed { // with length of song in seconds if (extent != null && extent.length() > 0) { extent = extent.split(" ")[0]; - Integer duration = Integer.parseInt(extent); + long duration = Long.parseLong(extent); itunes.setDuration(new Duration(duration)); // } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationMethod.java b/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationMethod.java index 25d31776cc..274779e928 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationMethod.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationMethod.java @@ -224,4 +224,15 @@ public interface AuthenticationMethod { * @return whether the authentication method is being used. 
*/ public boolean isUsed(Context context, HttpServletRequest request); + + /** + * Check if the given current password is valid to change the password of the + * given ePerson + * @param context The DSpace context + * @param ePerson the ePerson related to the password change + * @param currentPassword The current password to check + * @return true if the provided password matches with current + * password + */ + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword); } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationServiceImpl.java b/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationServiceImpl.java index 1270c1cb2c..a9449b87d4 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationServiceImpl.java @@ -207,4 +207,16 @@ public class AuthenticationServiceImpl implements AuthenticationService { return null; } + + @Override + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) { + + for (AuthenticationMethod method : getAuthenticationMethodStack()) { + if (method.getName().equals(context.getAuthenticationMethod())) { + return method.canChangePassword(context, ePerson, currentPassword); + } + } + + return false; + } } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java index 67405b5c1c..9c37fcee47 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java @@ -278,4 +278,9 @@ public class IPAuthentication implements AuthenticationMethod { public boolean isUsed(final Context context, final HttpServletRequest request) { return false; } + + @Override + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) { + 
return false; + } } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java index 520a5f62a6..f3c6022e02 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java @@ -752,4 +752,9 @@ public class LDAPAuthentication } return false; } + + @Override + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) { + return false; + } } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/OidcAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/OidcAuthentication.java index edaa87dd13..5d4635d48e 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/OidcAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/OidcAuthentication.java @@ -86,4 +86,9 @@ public class OidcAuthentication implements AuthenticationMethod { return false; } + @Override + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) { + return false; + } + } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/OidcAuthenticationBean.java b/dspace-api/src/main/java/org/dspace/authenticate/OidcAuthenticationBean.java index 41b40066b3..8a4ac190c8 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/OidcAuthenticationBean.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/OidcAuthenticationBean.java @@ -294,4 +294,9 @@ public class OidcAuthenticationBean implements AuthenticationMethod { return false; } + @Override + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) { + return false; + } + } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/OrcidAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/OrcidAuthentication.java new file mode 100644 index 0000000000..3e9ff6638a --- /dev/null 
+++ b/dspace-api/src/main/java/org/dspace/authenticate/OrcidAuthentication.java @@ -0,0 +1,109 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.authenticate; + +import java.sql.SQLException; +import java.util.Iterator; +import java.util.List; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.dspace.authenticate.factory.AuthenticateServiceFactory; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.kernel.ServiceManager; +import org.dspace.utils.DSpace; + +/** + * Implementation of {@link AuthenticationMethod} that delegate all the method + * invocations to the bean of class {@link OrcidAuthenticationBean}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidAuthentication implements AuthenticationMethod { + + private final ServiceManager serviceManager = new DSpace().getServiceManager(); + + /** + * Check if OrcidAuthentication plugin is enabled + * @return true if enabled, false otherwise + */ + public static boolean isEnabled() { + + String pluginName = new OrcidAuthentication().getName(); + + Iterator authenticationMethodIterator = AuthenticateServiceFactory.getInstance() + .getAuthenticationService().authenticationMethodIterator(); + + while (authenticationMethodIterator.hasNext()) { + if (pluginName.equals(authenticationMethodIterator.next().getName())) { + return true; + } + } + + return false; + } + + @Override + public boolean canSelfRegister(Context context, HttpServletRequest request, String username) throws SQLException { + return getOrcidAuthentication().canSelfRegister(context, request, username); + } + + @Override + public void initEPerson(Context context, HttpServletRequest request, 
EPerson eperson) throws SQLException { + getOrcidAuthentication().initEPerson(context, request, eperson); + } + + @Override + public boolean allowSetPassword(Context context, HttpServletRequest request, String username) throws SQLException { + return getOrcidAuthentication().allowSetPassword(context, request, username); + } + + @Override + public boolean isImplicit() { + return getOrcidAuthentication().isImplicit(); + } + + @Override + public List getSpecialGroups(Context context, HttpServletRequest request) throws SQLException { + return getOrcidAuthentication().getSpecialGroups(context, request); + } + + @Override + public int authenticate(Context context, String username, String password, String realm, HttpServletRequest request) + throws SQLException { + return getOrcidAuthentication().authenticate(context, username, password, realm, request); + } + + @Override + public String loginPageURL(Context context, HttpServletRequest request, HttpServletResponse response) { + return getOrcidAuthentication().loginPageURL(context, request, response); + } + + @Override + public String getName() { + return getOrcidAuthentication().getName(); + } + + private OrcidAuthenticationBean getOrcidAuthentication() { + return serviceManager.getServiceByName("orcidAuthentication", OrcidAuthenticationBean.class); + } + + @Override + public boolean isUsed(Context context, HttpServletRequest request) { + return getOrcidAuthentication().isUsed(context, request); + } + + @Override + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) { + return false; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/authenticate/OrcidAuthenticationBean.java b/dspace-api/src/main/java/org/dspace/authenticate/OrcidAuthenticationBean.java new file mode 100644 index 0000000000..a11bbfc867 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authenticate/OrcidAuthenticationBean.java @@ -0,0 +1,335 @@ +/** + * The contents of this file are subject to the license 
and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.authenticate; + +import static java.lang.String.format; +import static java.net.URLEncoder.encode; +import static org.apache.commons.lang.BooleanUtils.toBoolean; +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.dspace.content.Item.ANY; + +import java.io.UnsupportedEncodingException; +import java.sql.SQLException; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.dspace.authorize.AuthorizeException; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.eperson.service.EPersonService; +import org.dspace.orcid.OrcidToken; +import org.dspace.orcid.client.OrcidClient; +import org.dspace.orcid.client.OrcidConfiguration; +import org.dspace.orcid.model.OrcidTokenResponseDTO; +import org.dspace.orcid.service.OrcidSynchronizationService; +import org.dspace.orcid.service.OrcidTokenService; +import org.dspace.profile.ResearcherProfile; +import org.dspace.profile.service.ResearcherProfileService; +import org.dspace.services.ConfigurationService; +import org.orcid.jaxb.model.v3.release.record.Email; +import org.orcid.jaxb.model.v3.release.record.Person; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * ORCID authentication for DSpace. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidAuthenticationBean implements AuthenticationMethod { + + public static final String ORCID_AUTH_ATTRIBUTE = "orcid-authentication"; + + private final static Logger LOGGER = LoggerFactory.getLogger(OrcidAuthenticationBean.class); + + private final static String LOGIN_PAGE_URL_FORMAT = "%s?client_id=%s&response_type=code&scope=%s&redirect_uri=%s"; + + @Autowired + private OrcidClient orcidClient; + + @Autowired + private OrcidConfiguration orcidConfiguration; + + @Autowired + private ConfigurationService configurationService; + + @Autowired + private EPersonService ePersonService; + + @Autowired + private ResearcherProfileService researcherProfileService; + + @Autowired + private OrcidSynchronizationService orcidSynchronizationService; + + @Autowired + private OrcidTokenService orcidTokenService; + + @Override + public int authenticate(Context context, String username, String password, String realm, HttpServletRequest request) + throws SQLException { + + if (request == null) { + LOGGER.warn("Unable to authenticate using ORCID because the request object is null."); + return BAD_ARGS; + } + + String code = (String) request.getParameter("code"); + if (StringUtils.isEmpty(code)) { + LOGGER.warn("The incoming request has no code parameter"); + return NO_SUCH_USER; + } + request.setAttribute(ORCID_AUTH_ATTRIBUTE, true); + return authenticateWithOrcid(context, code, request); + } + + @Override + public String loginPageURL(Context context, HttpServletRequest request, HttpServletResponse response) { + + String authorizeUrl = orcidConfiguration.getAuthorizeEndpointUrl(); + String clientId = orcidConfiguration.getClientId(); + String redirectUri = orcidConfiguration.getRedirectUrl(); + String scopes = String.join("+", orcidConfiguration.getScopes()); + + if (StringUtils.isAnyBlank(authorizeUrl, clientId, redirectUri, scopes)) { + LOGGER.error("Missing mandatory configuration 
properties for OrcidAuthentication"); + return ""; + } + + try { + return format(LOGIN_PAGE_URL_FORMAT, authorizeUrl, clientId, scopes, encode(redirectUri, "UTF-8")); + } catch (UnsupportedEncodingException e) { + LOGGER.error(e.getMessage(), e); + return ""; + } + + } + + @Override + public boolean isUsed(Context context, HttpServletRequest request) { + return request.getAttribute(ORCID_AUTH_ATTRIBUTE) != null; + } + + @Override + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) { + return false; + } + + @Override + public boolean canSelfRegister(Context context, HttpServletRequest request, String username) throws SQLException { + return canSelfRegister(); + } + + @Override + public void initEPerson(Context context, HttpServletRequest request, EPerson eperson) throws SQLException { + + } + + @Override + public boolean allowSetPassword(Context context, HttpServletRequest request, String username) throws SQLException { + return false; + } + + @Override + public boolean isImplicit() { + return false; + } + + @Override + public List getSpecialGroups(Context context, HttpServletRequest request) throws SQLException { + return Collections.emptyList(); + } + + @Override + public String getName() { + return "orcid"; + } + + private int authenticateWithOrcid(Context context, String code, HttpServletRequest request) throws SQLException { + OrcidTokenResponseDTO token = getOrcidAccessToken(code); + if (token == null) { + return NO_SUCH_USER; + } + + String orcid = token.getOrcid(); + + EPerson ePerson = ePersonService.findByNetid(context, orcid); + if (ePerson != null) { + return ePerson.canLogIn() ? logInEPerson(context, token, ePerson) : BAD_ARGS; + } + + Person person = getPersonFromOrcid(token); + if (person == null) { + return NO_SUCH_USER; + } + + String email = getEmail(person).orElse(null); + + ePerson = ePersonService.findByEmail(context, email); + if (ePerson != null) { + return ePerson.canLogIn() ? 
logInEPerson(context, token, ePerson) : BAD_ARGS; + } + + return canSelfRegister() ? registerNewEPerson(context, person, token) : NO_SUCH_USER; + + } + + private int logInEPerson(Context context, OrcidTokenResponseDTO token, EPerson ePerson) + throws SQLException { + + context.setCurrentUser(ePerson); + + setOrcidMetadataOnEPerson(context, ePerson, token); + + ResearcherProfile profile = findProfile(context, ePerson); + if (profile != null) { + orcidSynchronizationService.linkProfile(context, profile.getItem(), token); + } + + return SUCCESS; + + } + + private ResearcherProfile findProfile(Context context, EPerson ePerson) throws SQLException { + try { + return researcherProfileService.findById(context, ePerson.getID()); + } catch (AuthorizeException e) { + throw new RuntimeException(e); + } + } + + private int registerNewEPerson(Context context, Person person, OrcidTokenResponseDTO token) throws SQLException { + + try { + context.turnOffAuthorisationSystem(); + + String email = getEmail(person) + .orElseThrow(() -> new IllegalStateException("The email is configured private on orcid")); + + String orcid = token.getOrcid(); + + EPerson eperson = ePersonService.create(context); + + eperson.setNetid(orcid); + + eperson.setEmail(email); + + Optional firstName = getFirstName(person); + if (firstName.isPresent()) { + eperson.setFirstName(context, firstName.get()); + } + + Optional lastName = getLastName(person); + if (lastName.isPresent()) { + eperson.setLastName(context, lastName.get()); + } + eperson.setCanLogIn(true); + eperson.setSelfRegistered(true); + + setOrcidMetadataOnEPerson(context, eperson, token); + + ePersonService.update(context, eperson); + context.setCurrentUser(eperson); + context.dispatchEvents(); + + return SUCCESS; + + } catch (Exception ex) { + LOGGER.error("An error occurs registering a new EPerson from ORCID", ex); + context.rollback(); + return NO_SUCH_USER; + } finally { + context.restoreAuthSystemState(); + } + } + + private void 
setOrcidMetadataOnEPerson(Context context, EPerson person, OrcidTokenResponseDTO token) + throws SQLException { + + String orcid = token.getOrcid(); + String accessToken = token.getAccessToken(); + String[] scopes = token.getScopeAsArray(); + + ePersonService.setMetadataSingleValue(context, person, "eperson", "orcid", null, null, orcid); + ePersonService.clearMetadata(context, person, "eperson", "orcid", "scope", ANY); + for (String scope : scopes) { + ePersonService.addMetadata(context, person, "eperson", "orcid", "scope", null, scope); + } + + OrcidToken orcidToken = orcidTokenService.findByEPerson(context, person); + if (orcidToken == null) { + orcidTokenService.create(context, person, accessToken); + } else { + orcidToken.setAccessToken(accessToken); + } + + } + + private Person getPersonFromOrcid(OrcidTokenResponseDTO token) { + try { + return orcidClient.getPerson(token.getAccessToken(), token.getOrcid()); + } catch (Exception ex) { + LOGGER.error("An error occurs retrieving the ORCID record with id " + token.getOrcid(), ex); + return null; + } + } + + private Optional getEmail(Person person) { + List emails = person.getEmails() != null ? 
person.getEmails().getEmails() : Collections.emptyList(); + if (CollectionUtils.isEmpty(emails)) { + return Optional.empty(); + } + return Optional.ofNullable(emails.get(0).getEmail()); + } + + private Optional getFirstName(Person person) { + return Optional.ofNullable(person.getName()) + .map(name -> name.getGivenNames()) + .map(givenNames -> givenNames.getContent()); + } + + private Optional getLastName(Person person) { + return Optional.ofNullable(person.getName()) + .map(name -> name.getFamilyName()) + .map(givenNames -> givenNames.getContent()); + } + + private boolean canSelfRegister() { + String canSelfRegister = configurationService.getProperty("authentication-orcid.can-self-register", "true"); + if (isBlank(canSelfRegister)) { + return true; + } + return toBoolean(canSelfRegister); + } + + private OrcidTokenResponseDTO getOrcidAccessToken(String code) { + try { + return orcidClient.getAccessToken(code); + } catch (Exception ex) { + LOGGER.error("An error occurs retrieving the ORCID access_token", ex); + return null; + } + } + + public OrcidClient getOrcidClient() { + return orcidClient; + } + + public void setOrcidClient(OrcidClient orcidClient) { + this.orcidClient = orcidClient; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java index 50a685872a..6d1ca862d3 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java @@ -22,6 +22,7 @@ import org.dspace.core.LogHelper; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; import org.dspace.services.factory.DSpaceServicesFactory; /** @@ -53,6 +54,8 @@ public class PasswordAuthentication private static final String PASSWORD_AUTHENTICATED = 
"password.authenticated"; + private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); + /** @@ -264,4 +267,12 @@ public class PasswordAuthentication } return false; } + + @Override + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) { + if (context == null || ePerson == null) { + return false; + } + return ePersonService.checkPassword(context, ePerson, currentPassword); + } } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/ShibAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/ShibAuthentication.java index a913d27d62..791634a7dc 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/ShibAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/ShibAuthentication.java @@ -20,7 +20,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.UUID; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; @@ -290,20 +289,13 @@ public class ShibAuthentication implements AuthenticationMethod { try { // User has not successfuly authenticated via shibboleth. if (request == null || - context.getCurrentUser() == null || - request.getSession().getAttribute("shib.authenticated") == null) { + context.getCurrentUser() == null) { return Collections.EMPTY_LIST; } - // If we have already calculated the special groups then return them. 
- if (request.getSession().getAttribute("shib.specialgroup") != null) { + if (context.getSpecialGroups().size() > 0 ) { log.debug("Returning cached special groups."); - List sessionGroupIds = (List) request.getSession().getAttribute("shib.specialgroup"); - List result = new ArrayList<>(); - for (UUID uuid : sessionGroupIds) { - result.add(groupService.find(context, uuid)); - } - return result; + return context.getSpecialGroups(); } log.debug("Starting to determine special groups"); @@ -396,16 +388,8 @@ public class ShibAuthentication implements AuthenticationMethod { log.info("Added current EPerson to special groups: " + groups); - List groupIds = new ArrayList<>(); - for (Group group : groups) { - groupIds.add(group.getID()); - } - - // Cache the special groups, so we don't have to recalculate them again - // for this session. - request.setAttribute("shib.specialgroup", groupIds); - return new ArrayList<>(groups); + } catch (Throwable t) { log.error("Unable to validate any sepcial groups this user may belong too because of an exception.", t); return Collections.EMPTY_LIST; @@ -1292,5 +1276,10 @@ public class ShibAuthentication implements AuthenticationMethod { } return false; } + + @Override + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) { + return false; + } } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/X509Authentication.java b/dspace-api/src/main/java/org/dspace/authenticate/X509Authentication.java index 503d90d0ec..12dc5feda5 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/X509Authentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/X509Authentication.java @@ -608,4 +608,9 @@ public class X509Authentication implements AuthenticationMethod { } return false; } + + @Override + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) { + return false; + } } diff --git 
a/dspace-api/src/main/java/org/dspace/authenticate/service/AuthenticationService.java b/dspace-api/src/main/java/org/dspace/authenticate/service/AuthenticationService.java index fba2f00323..e955302ec3 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/service/AuthenticationService.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/service/AuthenticationService.java @@ -177,4 +177,16 @@ public interface AuthenticationService { */ public String getAuthenticationMethod(Context context, HttpServletRequest request); + /** + * Check if the given current password is valid to change the password of the + * given ePerson. + * + * @param context The DSpace context + * @param ePerson the ePerson related to the password change + * @param currentPassword The current password to check + * @return true if the provided password matches with current + * password + */ + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword); + } diff --git a/dspace-api/src/main/java/org/dspace/authority/AuthoritySolrServiceImpl.java b/dspace-api/src/main/java/org/dspace/authority/AuthoritySolrServiceImpl.java index dab8cd5b2e..ca5b4a11b5 100644 --- a/dspace-api/src/main/java/org/dspace/authority/AuthoritySolrServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authority/AuthoritySolrServiceImpl.java @@ -50,7 +50,7 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho */ protected SolrClient solr = null; - protected SolrClient getSolr() + public SolrClient getSolr() throws MalformedURLException, SolrServerException, IOException { if (solr == null) { @@ -67,7 +67,11 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho SolrQuery solrQuery = new SolrQuery().setQuery("*:*"); - solrServer.query(solrQuery); + try { + solrServer.query(solrQuery); + } catch (Exception ex) { + log.error("An error occurs querying authority solr core", ex); + } solr = solrServer; } diff --git 
a/dspace-api/src/main/java/org/dspace/authority/orcid/Orcidv3SolrAuthorityImpl.java b/dspace-api/src/main/java/org/dspace/authority/orcid/Orcidv3SolrAuthorityImpl.java index a1c3867fb9..6753a5d113 100644 --- a/dspace-api/src/main/java/org/dspace/authority/orcid/Orcidv3SolrAuthorityImpl.java +++ b/dspace-api/src/main/java/org/dspace/authority/orcid/Orcidv3SolrAuthorityImpl.java @@ -21,7 +21,8 @@ import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpPost; import org.apache.http.impl.client.HttpClientBuilder; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.authority.AuthorityValue; import org.dspace.authority.SolrAuthorityInterface; import org.dspace.external.OrcidRestConnector; @@ -40,7 +41,7 @@ import org.orcid.jaxb.model.v3.release.search.Result; */ public class Orcidv3SolrAuthorityImpl implements SolrAuthorityInterface { - private static Logger log = Logger.getLogger(Orcidv3SolrAuthorityImpl.class); + private final static Logger log = LogManager.getLogger(); private OrcidRestConnector orcidRestConnector; private String OAUTHUrl; diff --git a/dspace-api/src/main/java/org/dspace/authority/util/XMLUtils.java b/dspace-api/src/main/java/org/dspace/authority/util/XMLUtils.java index 77568205af..6cf49ac65b 100644 --- a/dspace-api/src/main/java/org/dspace/authority/util/XMLUtils.java +++ b/dspace-api/src/main/java/org/dspace/authority/util/XMLUtils.java @@ -14,11 +14,12 @@ import java.util.Iterator; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; -import javax.xml.transform.TransformerException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; import org.apache.logging.log4j.Logger; -import 
org.apache.xpath.XPathAPI; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; @@ -62,36 +63,26 @@ public class XMLUtils { /** * @param xml The starting context (a Node or a Document, for example). - * @param NodeListXPath xpath + * @param nodeListXPath xpath * @return A Node matches the NodeListXPath * null if nothing matches the NodeListXPath * @throws XPathExpressionException if xpath error */ - public static Node getNode(Node xml, String NodeListXPath) throws XPathExpressionException { - Node result = null; - try { - result = XPathAPI.selectSingleNode(xml, NodeListXPath); - } catch (TransformerException e) { - log.error("Error", e); - } - return result; + public static Node getNode(Node xml, String nodeListXPath) throws XPathExpressionException { + XPath xPath = XPathFactory.newInstance().newXPath(); + return (Node) xPath.compile(nodeListXPath).evaluate(xml, XPathConstants.NODE); } /** * @param xml The starting context (a Node or a Document, for example). 
- * @param NodeListXPath xpath + * @param nodeListXPath xpath * @return A NodeList containing the nodes that match the NodeListXPath * null if nothing matches the NodeListXPath * @throws XPathExpressionException if xpath error */ - public static NodeList getNodeList(Node xml, String NodeListXPath) throws XPathExpressionException { - NodeList nodeList = null; - try { - nodeList = XPathAPI.selectNodeList(xml, NodeListXPath); - } catch (TransformerException e) { - log.error("Error", e); - } - return nodeList; + public static NodeList getNodeList(Node xml, String nodeListXPath) throws XPathExpressionException { + XPath xPath = XPathFactory.newInstance().newXPath(); + return (NodeList) xPath.compile(nodeListXPath).evaluate(xml, XPathConstants.NODESET); } public static Iterator getNodeListIterator(Node xml, String NodeListXPath) throws XPathExpressionException { diff --git a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java index 919e82f14f..5dd491fd4d 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java @@ -7,6 +7,9 @@ */ package org.dspace.authorize; +import static org.dspace.app.util.AuthorizeUtil.canCollectionAdminManageAccounts; +import static org.dspace.app.util.AuthorizeUtil.canCommunityAdminManageAccounts; + import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; @@ -28,10 +31,12 @@ import org.dspace.content.DSpaceObject; import org.dspace.content.Item; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.CollectionService; import org.dspace.content.service.WorkspaceItemService; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.DiscoverQuery.SORT_ORDER; 
import org.dspace.discovery.DiscoverResult; import org.dspace.discovery.IndexableObject; import org.dspace.discovery.SearchService; @@ -827,7 +832,7 @@ public class AuthorizeServiceImpl implements AuthorizeService { query = formatCustomQuery(query); DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" + IndexableCommunity.TYPE, - offset, limit); + offset, limit, null, null); for (IndexableObject solrCollections : discoverResult.getIndexableObjects()) { Community community = ((IndexableCommunity) solrCollections).getIndexedObject(); communities.add(community); @@ -849,7 +854,7 @@ public class AuthorizeServiceImpl implements AuthorizeService { query = formatCustomQuery(query); DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" + IndexableCommunity.TYPE, - null, null); + null, null, null, null); return discoverResult.getTotalSearchResults(); } @@ -874,7 +879,7 @@ public class AuthorizeServiceImpl implements AuthorizeService { query = formatCustomQuery(query); DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" + IndexableCollection.TYPE, - offset, limit); + offset, limit, CollectionService.SOLR_SORT_FIELD, SORT_ORDER.asc); for (IndexableObject solrCollections : discoverResult.getIndexableObjects()) { Collection collection = ((IndexableCollection) solrCollections).getIndexedObject(); collections.add(collection); @@ -896,17 +901,27 @@ public class AuthorizeServiceImpl implements AuthorizeService { query = formatCustomQuery(query); DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" + IndexableCollection.TYPE, - null, null); + null, null, null, null); return discoverResult.getTotalSearchResults(); } + @Override + public boolean isAccountManager(Context context) { + try { + return (canCommunityAdminManageAccounts() && isCommunityAdmin(context) + || canCollectionAdminManageAccounts() && isCollectionAdmin(context)); + } 
catch (SQLException e) { + throw new RuntimeException(e); + } + } + private boolean performCheck(Context context, String query) throws SQLException { if (context.getCurrentUser() == null) { return false; } try { - DiscoverResult discoverResult = getDiscoverResult(context, query, null, null); + DiscoverResult discoverResult = getDiscoverResult(context, query, null, null, null, null); if (discoverResult.getTotalSearchResults() > 0) { return true; } @@ -918,7 +933,8 @@ public class AuthorizeServiceImpl implements AuthorizeService { return false; } - private DiscoverResult getDiscoverResult(Context context, String query, Integer offset, Integer limit) + private DiscoverResult getDiscoverResult(Context context, String query, Integer offset, Integer limit, + String sortField, SORT_ORDER sortOrder) throws SearchServiceException, SQLException { String groupQuery = getGroupToQuery(groupService.allMemberGroups(context, context.getCurrentUser())); @@ -934,7 +950,9 @@ public class AuthorizeServiceImpl implements AuthorizeService { if (limit != null) { discoverQuery.setMaxResults(limit); } - + if (sortField != null && sortOrder != null) { + discoverQuery.setSortField(sortField, sortOrder); + } return searchService.search(context, discoverQuery); } diff --git a/dspace-api/src/main/java/org/dspace/authorize/RegexPasswordValidator.java b/dspace-api/src/main/java/org/dspace/authorize/RegexPasswordValidator.java new file mode 100644 index 0000000000..d12c3ba919 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authorize/RegexPasswordValidator.java @@ -0,0 +1,48 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.authorize; + +import static org.apache.commons.lang.StringUtils.isNotBlank; + +import java.util.regex.Pattern; + +import org.dspace.authorize.service.PasswordValidatorService; 
+import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link PasswordValidatorService} that verifies if the given + * password matches the configured pattern. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + */ +public class RegexPasswordValidator implements PasswordValidatorService { + + @Autowired + private ConfigurationService configurationService; + + @Override + public boolean isPasswordValidationEnabled() { + return isNotBlank(getPasswordValidationPattern()); + } + + @Override + public boolean isPasswordValid(String password) { + if (!isPasswordValidationEnabled()) { + return true; + } + + Pattern pattern = Pattern.compile(getPasswordValidationPattern()); + return pattern.matcher(password).find(); + } + + private String getPasswordValidationPattern() { + return configurationService.getProperty("authentication-password.regex-validation.pattern"); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java index a25a492a3a..954bb96990 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java +++ b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java @@ -93,7 +93,7 @@ public class ResourcePolicy implements ReloadableEntity { private String rptype; @Lob - @Type(type = "org.hibernate.type.MaterializedClobType") + @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") @Column(name = "rpdescription") private String rpdescription; diff --git a/dspace-api/src/main/java/org/dspace/authorize/ValidatePasswordServiceImpl.java b/dspace-api/src/main/java/org/dspace/authorize/ValidatePasswordServiceImpl.java new file mode 100644 index 0000000000..663308d627 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authorize/ValidatePasswordServiceImpl.java @@ -0,0 +1,33 @@ +/** + * The contents of this file are subject to the 
license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.authorize; + +import java.util.List; + +import org.dspace.authorize.service.PasswordValidatorService; +import org.dspace.authorize.service.ValidatePasswordService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Basic implementation for validation password robustness. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class ValidatePasswordServiceImpl implements ValidatePasswordService { + + @Autowired + private List validators; + + @Override + public boolean isPasswordValid(String password) { + return validators.stream() + .filter(passwordValidator -> passwordValidator.isPasswordValidationEnabled()) + .allMatch(passwordValidator -> passwordValidator.isPasswordValid(password)); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java index 9f6171a220..6b097cdd73 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java +++ b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java @@ -592,4 +592,12 @@ public interface AuthorizeService { */ long countAdminAuthorizedCollection(Context context, String query) throws SearchServiceException, SQLException; + + /** + * Returns true if the current user can manage accounts. 
+ * + * @param context context with the current user + * @return true if the current user can manage accounts + */ + boolean isAccountManager(Context context); } diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/PasswordValidatorService.java b/dspace-api/src/main/java/org/dspace/authorize/service/PasswordValidatorService.java new file mode 100644 index 0000000000..5817969b6d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authorize/service/PasswordValidatorService.java @@ -0,0 +1,29 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.authorize.service; + +/** + * Interface for classes that validate a given password with a specific + * strategy. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + */ +public interface PasswordValidatorService { + + /** + * Check if the password validator is active. + */ + public boolean isPasswordValidationEnabled(); + + /** + * This method checks whether the password is valid + * + * @param password password to validate + */ + public boolean isPasswordValid(String password); +} diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/ValidatePasswordService.java b/dspace-api/src/main/java/org/dspace/authorize/service/ValidatePasswordService.java new file mode 100644 index 0000000000..0d5f6191f6 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authorize/service/ValidatePasswordService.java @@ -0,0 +1,25 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.authorize.service; + +/** + * Services to use during Validating of password. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public interface ValidatePasswordService { + + /** + * This method checks whether the password is valid based on the configured + * rules/strategies. + * + * @param password password to validate + */ + public boolean isPasswordValid(String password); + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/browse/BrowseDAO.java b/dspace-api/src/main/java/org/dspace/browse/BrowseDAO.java index 22cf02fe13..29debf64e2 100644 --- a/dspace-api/src/main/java/org/dspace/browse/BrowseDAO.java +++ b/dspace-api/src/main/java/org/dspace/browse/BrowseDAO.java @@ -346,7 +346,7 @@ public interface BrowseDAO { public String getFilterValueField(); /** - * Set he name of the field in which the value to constrain results is + * Set the name of the field in which the value to constrain results is * contained * * @param valueField the name of the field diff --git a/dspace-api/src/main/java/org/dspace/browse/BrowseEngine.java b/dspace-api/src/main/java/org/dspace/browse/BrowseEngine.java index 7454c8e82b..6f0235e6c1 100644 --- a/dspace-api/src/main/java/org/dspace/browse/BrowseEngine.java +++ b/dspace-api/src/main/java/org/dspace/browse/BrowseEngine.java @@ -11,6 +11,7 @@ import java.sql.SQLException; import java.util.ArrayList; import java.util.List; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.content.Collection; import org.dspace.content.Community; @@ -202,6 +203,13 @@ public class BrowseEngine { // get the table name that we are going to be getting our data from dao.setTable(browseIndex.getTableName()); + if (scope.getBrowseIndex() != null && OrderFormat.TITLE.equals(scope.getBrowseIndex().getDataType())) { + // For browsing by title, apply the same normalization applied to indexed titles + dao.setStartsWith(normalizeJumpToValue(scope.getStartsWith())); + } else { + dao.setStartsWith(StringUtils.lowerCase(scope.getStartsWith())); 
+ } + // tell the browse query whether we are ascending or descending on the value dao.setAscending(scope.isAscending()); @@ -248,9 +256,6 @@ public class BrowseEngine { } } - // this is the total number of results in answer to the query - int total = getTotalResults(); - // assemble the ORDER BY clause String orderBy = browseIndex.getSortField(scope.isSecondLevel()); if (scope.getSortBy() > 0) { @@ -258,6 +263,9 @@ public class BrowseEngine { } dao.setOrderField(orderBy); + // this is the total number of results in answer to the query + int total = getTotalResults(); + int offset = scope.getOffset(); String rawFocusValue = null; if (offset < 1 && (scope.hasJumpToItem() || scope.hasJumpToValue() || scope.hasStartsWith())) { @@ -269,9 +277,6 @@ public class BrowseEngine { String focusValue = normalizeJumpToValue(rawFocusValue); log.debug("browsing using focus: " + focusValue); - - // Convert the focus value into an offset - offset = getOffsetForValue(focusValue); } dao.setOffset(offset); @@ -290,7 +295,7 @@ public class BrowseEngine { // now, if we don't have any results, we are at the end of the browse. This will // be because a starts_with value has been supplied for which we don't have // any items. - if (results.size() == 0) { + if (results.isEmpty()) { // In this case, we will calculate a new offset for the last page of results offset = total - scope.getResultsPerPage(); if (offset < 0) { @@ -450,7 +455,7 @@ public class BrowseEngine { // now, if we don't have any results, we are at the end of the browse. This will // be because a starts_with value has been supplied for which we don't have // any items. 
- if (results.size() == 0) { + if (results.isEmpty()) { // In this case, we will calculate a new offset for the last page of results offset = total - scope.getResultsPerPage(); if (offset < 0) { @@ -463,7 +468,7 @@ public class BrowseEngine { } } else { // No records, so make an empty list - results = new ArrayList(); + results = new ArrayList<>(); } // construct the BrowseInfo object to pass back @@ -554,7 +559,7 @@ public class BrowseEngine { } String col = "sort_1"; - if (so.getNumber() > 0) { + if (so != null && so.getNumber() > 0) { col = "sort_" + Integer.toString(so.getNumber()); } @@ -591,7 +596,7 @@ public class BrowseEngine { } String col = "sort_1"; - if (so.getNumber() > 0) { + if (so != null && so.getNumber() > 0) { col = "sort_" + Integer.toString(so.getNumber()); } @@ -684,13 +689,11 @@ public class BrowseEngine { // our count, storing them locally to reinstate later String focusField = dao.getJumpToField(); String focusValue = dao.getJumpToValue(); - String orderField = dao.getOrderField(); int limit = dao.getLimit(); int offset = dao.getOffset(); dao.setJumpToField(null); dao.setJumpToValue(null); - dao.setOrderField(null); dao.setLimit(-1); dao.setOffset(-1); @@ -700,7 +703,6 @@ public class BrowseEngine { // now put back the values we removed for this method dao.setJumpToField(focusField); dao.setJumpToValue(focusValue); - dao.setOrderField(orderField); dao.setLimit(limit); dao.setOffset(offset); dao.setCountValues(null); diff --git a/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java b/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java index 859063272a..8d065c21ce 100644 --- a/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java +++ b/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java @@ -313,14 +313,6 @@ public final class BrowseIndex { return name; } - /** - * @param name The name to set. 
- */ -// public void setName(String name) -// { -// this.name = name; -// } - /** * Get the SortOption associated with this index. * diff --git a/dspace-api/src/main/java/org/dspace/browse/ItemListConfig.java b/dspace-api/src/main/java/org/dspace/browse/ItemListConfig.java index 9cbbe8f194..6a63659c82 100644 --- a/dspace-api/src/main/java/org/dspace/browse/ItemListConfig.java +++ b/dspace-api/src/main/java/org/dspace/browse/ItemListConfig.java @@ -25,22 +25,7 @@ public class ItemListConfig { /** * a map of column number to metadata value */ - private Map metadata = new HashMap(); - - /** - * a map of column number to data type - */ - private Map types = new HashMap(); - - /** - * constant for a DATE column - */ - private static final int DATE = 1; - - /** - * constant for a TEXT column - */ - private static final int TEXT = 2; + private Map metadata = new HashMap<>(); private final transient ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); @@ -63,14 +48,11 @@ public class ItemListConfig { // parse the config int i = 1; for (String token : browseFields) { - Integer key = Integer.valueOf(i); + Integer key = i; // find out if the field is a date if (token.indexOf("(date)") > 0) { token = token.replaceAll("\\(date\\)", ""); - types.put(key, Integer.valueOf(ItemListConfig.DATE)); - } else { - types.put(key, Integer.valueOf(ItemListConfig.TEXT)); } String[] mdBits = interpretField(token.trim(), null); @@ -100,7 +82,7 @@ public class ItemListConfig { * @return array of metadata */ public String[] getMetadata(int col) { - return metadata.get(Integer.valueOf(col)); + return metadata.get(col); } /** diff --git a/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java b/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java index 6a960e8d75..cee3ae017e 100644 --- a/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java +++ b/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java @@ -17,6 
+17,7 @@ import java.util.UUID; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; +import org.apache.solr.client.solrj.util.ClientUtils; import org.dspace.authorize.factory.AuthorizeServiceFactory; import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Item; @@ -205,6 +206,10 @@ public class SolrBrowseDAO implements BrowseDAO { } else if (valuePartial) { query.addFilterQueries("{!field f=" + facetField + "_partial}" + value); } + if (StringUtils.isNotBlank(startsWith) && orderField != null) { + query.addFilterQueries( + "bi_" + orderField + "_sort:" + ClientUtils.escapeQueryChars(startsWith) + "*"); + } // filter on item to be sure to don't include any other object // indexed in the Discovery Search core query.addFilterQueries("search.resourcetype:" + IndexableItem.TYPE); diff --git a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java index aa32983362..485f1d6451 100644 --- a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java @@ -158,6 +158,11 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement } bundle.addBitstream(bitstream); + // If a bitstream is moved from one bundle to another it may be temporarily flagged as deleted + // (when removed from the original bundle) + if (bitstream.isDeleted()) { + bitstream.setDeleted(false); + } bitstream.getBundles().add(bundle); diff --git a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java index e54f609389..8488b4eaf1 100644 --- a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java @@ -43,6 +43,7 @@ import org.dspace.core.I18nUtil; import org.dspace.core.LogHelper; import 
org.dspace.core.service.LicenseService; import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.DiscoverQuery.SORT_ORDER; import org.dspace.discovery.DiscoverResult; import org.dspace.discovery.IndexableObject; import org.dspace.discovery.SearchService; @@ -946,6 +947,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl i discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE); discoverQuery.setStart(offset); discoverQuery.setMaxResults(limit); + discoverQuery.setSortField(SOLR_SORT_FIELD, SORT_ORDER.asc); DiscoverResult resp = retrieveCollectionsWithSubmit(context, discoverQuery, null, community, q); for (IndexableObject solrCollections : resp.getIndexableObjects()) { Collection c = ((IndexableCollection) solrCollections).getIndexedObject(); @@ -1025,6 +1027,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl i discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE); discoverQuery.setStart(offset); discoverQuery.setMaxResults(limit); + discoverQuery.setSortField(SOLR_SORT_FIELD, SORT_ORDER.asc); DiscoverResult resp = retrieveCollectionsWithSubmit(context, discoverQuery, entityType, community, q); for (IndexableObject solrCollections : resp.getIndexableObjects()) { diff --git a/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java index 6b188396f6..24778824bf 100644 --- a/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java @@ -243,67 +243,64 @@ public abstract class DSpaceObjectServiceImpl implements boolean authorityControlled = metadataAuthorityService.isAuthorityControlled(metadataField); boolean authorityRequired = metadataAuthorityService.isAuthorityRequired(metadataField); - List newMetadata = new ArrayList<>(values.size()); + List newMetadata = new ArrayList<>(); // We will not verify that they are 
valid entries in the registry // until update() is called. for (int i = 0; i < values.size(); i++) { - - if (authorities != null && authorities.size() >= i) { - if (StringUtils.startsWith(authorities.get(i), Constants.VIRTUAL_AUTHORITY_PREFIX)) { - continue; - } - } - MetadataValue metadataValue = metadataValueService.create(context, dso, metadataField); - newMetadata.add(metadataValue); - - metadataValue.setPlace(placeSupplier.get()); - - metadataValue.setLanguage(lang == null ? null : lang.trim()); - - // Logic to set Authority and Confidence: - // - normalize an empty string for authority to NULL. - // - if authority key is present, use given confidence or NOVALUE if not given - // - otherwise, preserve confidence if meaningful value was given since it may document a failed - // authority lookup - // - CF_UNSET signifies no authority nor meaningful confidence. - // - it's possible to have empty authority & CF_ACCEPTED if e.g. user deletes authority key - if (authorityControlled) { - if (authorities != null && authorities.get(i) != null && authorities.get(i).length() > 0) { - metadataValue.setAuthority(authorities.get(i)); - metadataValue.setConfidence(confidences == null ? Choices.CF_NOVALUE : confidences.get(i)); - } else { - metadataValue.setAuthority(null); - metadataValue.setConfidence(confidences == null ? Choices.CF_UNSET : confidences.get(i)); - } - // authority sanity check: if authority is required, was it supplied? - // XXX FIXME? can't throw a "real" exception here without changing all the callers to expect it, so - // use a runtime exception - if (authorityRequired && (metadataValue.getAuthority() == null || metadataValue.getAuthority() - .length() == 0)) { - throw new IllegalArgumentException("The metadata field \"" + metadataField - .toString() + "\" requires an authority key but none was provided. 
Value=\"" + values - .get(i) + "\""); - } - } if (values.get(i) != null) { + if (authorities != null && authorities.size() >= i) { + if (StringUtils.startsWith(authorities.get(i), Constants.VIRTUAL_AUTHORITY_PREFIX)) { + continue; + } + } + MetadataValue metadataValue = metadataValueService.create(context, dso, metadataField); + newMetadata.add(metadataValue); + + metadataValue.setPlace(placeSupplier.get()); + + metadataValue.setLanguage(lang == null ? null : lang.trim()); + + // Logic to set Authority and Confidence: + // - normalize an empty string for authority to NULL. + // - if authority key is present, use given confidence or NOVALUE if not given + // - otherwise, preserve confidence if meaningful value was given since it may document a failed + // authority lookup + // - CF_UNSET signifies no authority nor meaningful confidence. + // - it's possible to have empty authority & CF_ACCEPTED if e.g. user deletes authority key + if (authorityControlled) { + if (authorities != null && authorities.get(i) != null && authorities.get(i).length() > 0) { + metadataValue.setAuthority(authorities.get(i)); + metadataValue.setConfidence(confidences == null ? Choices.CF_NOVALUE : confidences.get(i)); + } else { + metadataValue.setAuthority(null); + metadataValue.setConfidence(confidences == null ? Choices.CF_UNSET : confidences.get(i)); + } + // authority sanity check: if authority is required, was it supplied? + // XXX FIXME? can't throw a "real" exception here without changing all the callers to expect it, so + // use a runtime exception + if (authorityRequired && (metadataValue.getAuthority() == null || metadataValue.getAuthority() + .length() == 0)) { + throw new IllegalArgumentException("The metadata field \"" + metadataField + .toString() + "\" requires an authority key but none was provided. 
+ Value=\"" + values + .get(i) + "\""); + } + } // remove control unicode char String temp = values.get(i).trim(); char[] dcvalue = temp.toCharArray(); for (int charPos = 0; charPos < dcvalue.length; charPos++) { if (Character.isISOControl(dcvalue[charPos]) && - !String.valueOf(dcvalue[charPos]).equals("\u0009") && - !String.valueOf(dcvalue[charPos]).equals("\n") && - !String.valueOf(dcvalue[charPos]).equals("\r")) { + !String.valueOf(dcvalue[charPos]).equals("\u0009") && + !String.valueOf(dcvalue[charPos]).equals("\n") && + !String.valueOf(dcvalue[charPos]).equals("\r")) { dcvalue[charPos] = ' '; } } metadataValue.setValue(String.valueOf(dcvalue)); - } else { - metadataValue.setValue(null); - } - //An update here isn't needed, this is persited upon the merge of the owning object + //An update here isn't needed, this is persisted upon the merge of the owning object // metadataValueService.update(context, metadataValue); - dso.addDetails(metadataField.toString()); + dso.addDetails(metadataField.toString()); + } } setMetadataModified(dso); return newMetadata; @@ -624,8 +621,14 @@ public abstract class DSpaceObjectServiceImpl implements }); for (MetadataValue metadataValue : metadataValues) { //Retrieve & store the place for each metadata value - if (StringUtils.startsWith(metadataValue.getAuthority(), Constants.VIRTUAL_AUTHORITY_PREFIX) && - ((RelationshipMetadataValue) metadataValue).isUseForPlace()) { + if ( + // For virtual MDVs with useForPlace=true, + // update both the place of the metadatum and the place of the Relationship. + // E.g. for an Author relationship, + // the place should be updated using the same principle as dc.contributor.author. 
+ StringUtils.startsWith(metadataValue.getAuthority(), Constants.VIRTUAL_AUTHORITY_PREFIX) + && ((RelationshipMetadataValue) metadataValue).isUseForPlace() + ) { int mvPlace = getMetadataValuePlace(fieldToLastPlace, metadataValue); metadataValue.setPlace(mvPlace); String authority = metadataValue.getAuthority(); @@ -638,8 +641,16 @@ public abstract class DSpaceObjectServiceImpl implements } relationshipService.update(context, relationship); - } else if (!StringUtils.startsWith(metadataValue.getAuthority(), - Constants.VIRTUAL_AUTHORITY_PREFIX)) { + } else if ( + // Otherwise, just set the place of the metadatum + // ...unless the metadatum in question is a relation.* metadatum. + // This case is a leftover from when a Relationship is removed and copied to metadata. + // If we let its place change the order of any remaining Relationships will be affected. + // todo: this makes it so these leftover MDVs can't be reordered later on + !StringUtils.equals( + metadataValue.getMetadataField().getMetadataSchema().getName(), "relation" + ) + ) { int mvPlace = getMetadataValuePlace(fieldToLastPlace, metadataValue); metadataValue.setPlace(mvPlace); } diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index 96dac1a4df..27d0ba189c 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -17,6 +17,7 @@ import java.util.Date; import java.util.Iterator; import java.util.LinkedList; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.UUID; import java.util.function.Supplier; @@ -40,6 +41,7 @@ import org.dspace.content.service.BitstreamService; import org.dspace.content.service.BundleService; import org.dspace.content.service.CollectionService; import org.dspace.content.service.CommunityService; +import org.dspace.content.service.EntityTypeService; import 
org.dspace.content.service.InstallItemService; import org.dspace.content.service.ItemService; import org.dspace.content.service.MetadataSchemaService; @@ -56,6 +58,15 @@ import org.dspace.harvest.HarvestedItem; import org.dspace.harvest.service.HarvestedItemService; import org.dspace.identifier.IdentifierException; import org.dspace.identifier.service.IdentifierService; +import org.dspace.orcid.OrcidHistory; +import org.dspace.orcid.OrcidQueue; +import org.dspace.orcid.OrcidToken; +import org.dspace.orcid.model.OrcidEntityType; +import org.dspace.orcid.service.OrcidHistoryService; +import org.dspace.orcid.service.OrcidQueueService; +import org.dspace.orcid.service.OrcidSynchronizationService; +import org.dspace.orcid.service.OrcidTokenService; +import org.dspace.profile.service.ResearcherProfileService; import org.dspace.services.ConfigurationService; import org.dspace.versioning.service.VersioningService; import org.dspace.workflow.WorkflowItemService; @@ -120,6 +131,24 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl implements It @Autowired(required = true) private RelationshipMetadataService relationshipMetadataService; + @Autowired(required = true) + private EntityTypeService entityTypeService; + + @Autowired + private OrcidTokenService orcidTokenService; + + @Autowired(required = true) + private OrcidHistoryService orcidHistoryService; + + @Autowired(required = true) + private OrcidQueueService orcidQueueService; + + @Autowired(required = true) + private OrcidSynchronizationService orcidSynchronizationService; + + @Autowired(required = true) + private ResearcherProfileService researcherProfileService; + protected ItemServiceImpl() { super(); } @@ -241,6 +270,10 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl implements It return itemDAO.findAll(context, true, true); } + public Iterator findAllRegularItems(Context context) throws SQLException { + return itemDAO.findAllRegularItems(context); + }; + @Override public Iterator 
findBySubmitter(Context context, EPerson eperson) throws SQLException { return itemDAO.findBySubmitter(context, eperson); @@ -724,7 +757,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl implements It + item.getID())); // Remove relationships - for (Relationship relationship : relationshipService.findByItem(context, item)) { + for (Relationship relationship : relationshipService.findByItem(context, item, -1, -1, false, false)) { relationshipService.forceDelete(context, relationship, false, false); } @@ -737,6 +770,8 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl implements It // remove version attached to the item removeVersion(context, item); + removeOrcidSynchronizationStuff(context, item); + // Also delete the item if it appears in a harvested collection. HarvestedItem hi = harvestedItemService.find(context, item); @@ -744,6 +779,11 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl implements It harvestedItemService.delete(context, hi); } + OrcidToken orcidToken = orcidTokenService.findByProfileItem(context, item); + if (orcidToken != null) { + orcidToken.setProfileItem(null); + } + //Only clear collections after we have removed everything else from the item item.clearCollections(); item.setOwningCollection(null); @@ -911,6 +951,12 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl implements It @Override public void move(Context context, Item item, Collection from, Collection to) throws SQLException, AuthorizeException, IOException { + + // If the two collections are the same, do nothing. 
+ if (from.equals(to)) { + return; + } + // Use the normal move method, and default to not inherit permissions this.move(context, item, from, to, false); } @@ -1125,6 +1171,50 @@ prevent the generation of resource policy entry values with null dspace_object a return !(hasCustomPolicy && isAnonimousGroup && datesAreNull); } + /** + * Returns an iterator of Items possessing the passed metadata field, or only + * those matching the passed value, if value is not Item.ANY + * + * @param context DSpace context object + * @param schema metadata field schema + * @param element metadata field element + * @param qualifier metadata field qualifier + * @param value field value or Item.ANY to match any value + * @return an iterator over the items matching that authority value + * @throws SQLException if database error + * An exception that provides information on a database access error or other errors. + * @throws AuthorizeException if authorization error + * Exception indicating the current user of the context does not have permission + * to perform a particular action. 
+ */ + @Override + public Iterator findArchivedByMetadataField(Context context, + String schema, String element, String qualifier, String value) + throws SQLException, AuthorizeException { + MetadataSchema mds = metadataSchemaService.find(context, schema); + if (mds == null) { + throw new IllegalArgumentException("No such metadata schema: " + schema); + } + MetadataField mdf = metadataFieldService.findByElement(context, mds, element, qualifier); + if (mdf == null) { + throw new IllegalArgumentException( + "No such metadata field: schema=" + schema + ", element=" + element + ", qualifier=" + qualifier); + } + + if (Item.ANY.equals(value)) { + return itemDAO.findByMetadataField(context, mdf, null, true); + } else { + return itemDAO.findByMetadataField(context, mdf, value, true); + } + } + + @Override + public Iterator findArchivedByMetadataField(Context context, String metadataField, String value) + throws SQLException, AuthorizeException { + String[] mdValueByField = getMDValueByField(metadataField); + return findArchivedByMetadataField(context, mdValueByField[0], mdValueByField[1], mdValueByField[2], value); + } + /** * Returns an iterator of Items possessing the passed metadata field, or only * those matching the passed value, if value is not Item.ANY @@ -1529,5 +1619,100 @@ prevent the generation of resource policy entry values with null dspace_object a .stream().findFirst().orElse(null); } + @Override + public String getEntityTypeLabel(Item item) { + List mdvs = getMetadata(item, "dspace", "entity", "type", Item.ANY, false); + if (mdvs.isEmpty()) { + return null; + } + + if (mdvs.size() > 1) { + log.warn( + "Item with uuid {}, handle {} has {} entity types ({}), expected 1 entity type", + item.getID(), item.getHandle(), mdvs.size(), + mdvs.stream().map(MetadataValue::getValue).collect(Collectors.toList()) + ); + } + + String entityType = mdvs.get(0).getValue(); + if (StringUtils.isBlank(entityType)) { + return null; + } + + return entityType; + } + + @Override + 
public EntityType getEntityType(Context context, Item item) throws SQLException { + String entityTypeString = getEntityTypeLabel(item); + if (StringUtils.isBlank(entityTypeString)) { + return null; + } + + return entityTypeService.findByEntityType(context, entityTypeString); + } + + private void removeOrcidSynchronizationStuff(Context context, Item item) throws SQLException, AuthorizeException { + + if (isNotProfileOrOrcidEntity(item)) { + return; + } + + context.turnOffAuthorisationSystem(); + + try { + + createOrcidQueueRecordsToDeleteOnOrcid(context, item); + deleteOrcidHistoryRecords(context, item); + deleteOrcidQueueRecords(context, item); + + } finally { + context.restoreAuthSystemState(); + } + + } + + private boolean isNotProfileOrOrcidEntity(Item item) { + String entityType = getEntityTypeLabel(item); + return !OrcidEntityType.isValidEntityType(entityType) + && !researcherProfileService.getProfileType().equals(entityType); + } + + private void createOrcidQueueRecordsToDeleteOnOrcid(Context context, Item entity) throws SQLException { + + String entityType = getEntityTypeLabel(entity); + if (entityType == null || researcherProfileService.getProfileType().equals(entityType)) { + return; + } + + Map profileAndPutCodeMap = orcidHistoryService.findLastPutCodes(context, entity); + for (Item profile : profileAndPutCodeMap.keySet()) { + if (orcidSynchronizationService.isSynchronizationAllowed(profile, entity)) { + String putCode = profileAndPutCodeMap.get(profile); + String title = getMetadataFirstValue(entity, "dc", "title", null, Item.ANY); + orcidQueueService.createEntityDeletionRecord(context, profile, title, entityType, putCode); + } + } + + } + + private void deleteOrcidHistoryRecords(Context context, Item item) throws SQLException { + List historyRecords = orcidHistoryService.findByProfileItemOrEntity(context, item); + for (OrcidHistory historyRecord : historyRecords) { + if (historyRecord.getProfileItem().equals(item)) { + 
orcidHistoryService.delete(context, historyRecord); + } else { + historyRecord.setEntity(null); + orcidHistoryService.update(context, historyRecord); + } + } + } + + private void deleteOrcidQueueRecords(Context context, Item item) throws SQLException { + List orcidQueueRecords = orcidQueueService.findByProfileItemOrEntity(context, item); + for (OrcidQueue orcidQueueRecord : orcidQueueRecords) { + orcidQueueService.delete(context, orcidQueueRecord); + } + } } diff --git a/dspace-api/src/main/java/org/dspace/content/LicenseUtils.java b/dspace-api/src/main/java/org/dspace/content/LicenseUtils.java index be804a9bbb..673a30d2dd 100644 --- a/dspace-api/src/main/java/org/dspace/content/LicenseUtils.java +++ b/dspace-api/src/main/java/org/dspace/content/LicenseUtils.java @@ -59,7 +59,7 @@ public class LicenseUtils { * {6} the eperson object that will be formatted using the appropriate * LicenseArgumentFormatter plugin (if defined)
    * {x} any addition argument supplied wrapped in the - * LicenseArgumentFormatter based on his type (map key) + * LicenseArgumentFormatter based on its type (map key) * * @param locale Formatter locale * @param collection collection to get license from diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataValue.java b/dspace-api/src/main/java/org/dspace/content/MetadataValue.java index d1b636cdff..9ff3cb9ec2 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataValue.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataValue.java @@ -59,7 +59,7 @@ public class MetadataValue implements ReloadableEntity { * The value of the field */ @Lob - @Type(type = "org.hibernate.type.MaterializedClobType") + @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") @Column(name = "text_value") private String value; diff --git a/dspace-api/src/main/java/org/dspace/content/Relationship.java b/dspace-api/src/main/java/org/dspace/content/Relationship.java index 81d13d6c10..77c418a23d 100644 --- a/dspace-api/src/main/java/org/dspace/content/Relationship.java +++ b/dspace-api/src/main/java/org/dspace/content/Relationship.java @@ -89,6 +89,15 @@ public class Relationship implements ReloadableEntity { @Column(name = "rightward_value") private String rightwardValue; + /** + * Whether the left and/or right side of a given relationship are the "latest". + * A side of a relationship is "latest" if the item on that side has either no other versions, + * or the item on that side is the most recent version that is relevant to the given relationship. + * This column affects what version of an item appears on search pages or the relationship listings of other items. 
+ */ + @Column(name = "latest_version_status") + private LatestVersionStatus latestVersionStatus = LatestVersionStatus.BOTH; + /** * Protected constructor, create object using: * {@link org.dspace.content.service.RelationshipService#create(Context)} } @@ -216,6 +225,39 @@ public class Relationship implements ReloadableEntity { this.rightwardValue = rightwardValue; } + /** + * Getter for {@link #latestVersionStatus}. + * @return the latest version status of this relationship. + */ + public LatestVersionStatus getLatestVersionStatus() { + return latestVersionStatus; + } + + /** + * Setter for {@link #latestVersionStatus}. + * @param latestVersionStatus the new latest version status for this relationship. + */ + public void setLatestVersionStatus(LatestVersionStatus latestVersionStatus) { + if (this.latestVersionStatus == latestVersionStatus) { + return; // no change or cache reset needed + } + + this.latestVersionStatus = latestVersionStatus; + + // on one item, relation.* fields will change + // on the other item, relation.*.latestForDiscovery will change + leftItem.setMetadataModified(); + rightItem.setMetadataModified(); + } + + public enum LatestVersionStatus { + // NOTE: SQL migration expects BOTH to be the first constant in this enum! 
+ BOTH, // both items in this relationship are the "latest" + LEFT_ONLY, // the left-hand item of this relationship is the "latest", but the right-hand item is not + RIGHT_ONLY // the right-hand item of this relationship is the "latest", but the left-hand item is not + // NOTE: one side of any given relationship should ALWAYS be the "latest" + } + /** * Standard getter for the ID for this Relationship * @return The ID of this relationship diff --git a/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataService.java b/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataService.java index 38b0d18bd9..c3570ad47e 100644 --- a/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataService.java +++ b/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataService.java @@ -56,7 +56,9 @@ public interface RelationshipMetadataService { * This method will retrieve the EntityType String from an item * @param item The Item for which the entityType String will be returned * @return A String value indicating the entityType + * @deprecated use {@link org.dspace.content.service.ItemService#getEntityTypeLabel(Item)} instead. 
*/ + @Deprecated public String getEntityTypeStringFromMetadata(Item item); } diff --git a/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataServiceImpl.java index f8b756a1ea..14ed441b81 100644 --- a/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataServiceImpl.java @@ -7,16 +7,24 @@ */ package org.dspace.content; +import static org.dspace.content.RelationshipType.Tilted.LEFT; +import static org.dspace.content.RelationshipType.Tilted.RIGHT; + import java.sql.SQLException; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; +import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId; +import org.dspace.content.service.ItemService; import org.dspace.content.service.MetadataFieldService; import org.dspace.content.service.RelationshipService; +import org.dspace.content.service.RelationshipTypeService; import org.dspace.content.virtual.VirtualMetadataConfiguration; import org.dspace.content.virtual.VirtualMetadataPopulator; import org.dspace.core.Constants; @@ -33,6 +41,12 @@ public class RelationshipMetadataServiceImpl implements RelationshipMetadataServ @Autowired(required = true) protected RelationshipService relationshipService; + @Autowired(required = true) + protected RelationshipTypeService relationshipTypeService; + + @Autowired(required = true) + protected ItemService itemService; + @Autowired(required = true) protected VirtualMetadataPopulator virtualMetadataPopulator; @@ -44,12 +58,25 @@ public class RelationshipMetadataServiceImpl implements RelationshipMetadataServ Context context = new Context(); List fullMetadataValueList = new LinkedList<>(); try { - String 
entityType = getEntityTypeStringFromMetadata(item); - if (StringUtils.isNotBlank(entityType)) { + EntityType entityType = itemService.getEntityType(context, item); + if (entityType != null) { + // NOTE: The following code will add metadata fields of type relation.*.latestForDiscovery + // (e.g. relation.isAuthorOfPublication.latestForDiscovery). + // These fields contain the UUIDs of the items that have a relationship with current item, + // from the perspective of the other item. In other words, given a relationship with this item, + // the current item should have "latest status" in order for the other item to appear in + // relation.*.latestForDiscovery fields. + fullMetadataValueList.addAll(findLatestForDiscoveryMetadataValues(context, item, entityType)); + + // NOTE: The following code will, among other things, + // add metadata fields of type relation.* (e.g. relation.isAuthorOfPublication). + // These fields contain the UUIDs of the items that have a relationship with current item, + // from the perspective of this item. In other words, given a relationship with this item, + // the other item should have "latest status" in order to appear in relation.* fields. 
List relationships = relationshipService.findByItem(context, item, -1, -1, true); for (Relationship relationship : relationships) { fullMetadataValueList - .addAll(findRelationshipMetadataValueForItemRelationship(context, item, entityType, + .addAll(findRelationshipMetadataValueForItemRelationship(context, item, entityType.getLabel(), relationship, enableVirtualMetadata)); } @@ -60,16 +87,90 @@ public class RelationshipMetadataServiceImpl implements RelationshipMetadataServ return fullMetadataValueList; } - public String getEntityTypeStringFromMetadata(Item item) { - List list = item.getMetadata(); - for (MetadataValue mdv : list) { - if (StringUtils.equals(mdv.getMetadataField().getMetadataSchema().getName(), "dspace") - && StringUtils.equals(mdv.getMetadataField().getElement(), "entity") - && StringUtils.equals(mdv.getMetadataField().getQualifier(), "type")) { - return mdv.getValue(); + /** + * Create the list of relation.*.latestForDiscovery virtual metadata values for the given item. + * @param context the DSpace context. + * @param item the item. + * @param itemEntityType the entity type of the item. + * @return a list (may be empty) of metadata values of type relation.*.latestForDiscovery. + */ + protected List findLatestForDiscoveryMetadataValues( + Context context, Item item, EntityType itemEntityType + ) throws SQLException { + final String schema = MetadataSchemaEnum.RELATION.getName(); + final String qualifier = "latestForDiscovery"; + + List mdvs = new LinkedList<>(); + + List relationshipTypes = relationshipTypeService.findByEntityType(context, itemEntityType); + for (RelationshipType relationshipType : relationshipTypes) { + // item is on left side of this relationship type + // NOTE: On the left item, we should index the uuids of the right items. If the relationship type is + // "tilted right", it means that we expect a huge amount of right items, so we don't index their uuids + // on the left item as a storage/performance improvement. 
+ // As a consequence, when searching for related items (using discovery) + // on the pages of the right items you won't be able to find the left item. + if (relationshipType.getTilted() != RIGHT && relationshipType.getLeftType().equals(itemEntityType)) { + String element = relationshipType.getLeftwardType(); + List data = relationshipService + .findByLatestItemAndRelationshipType(context, item, relationshipType, true); + mdvs.addAll(constructLatestForDiscoveryMetadataValues(context, schema, element, qualifier, data)); + } + + // item is on right side of this relationship type + // NOTE: On the right item, we should index the uuids of the left items. If the relationship type is + // "tilted left", it means that we expect a huge amount of left items, so we don't index their uuids + // on the right item as a storage/performance improvement. + // As a consequence, when searching for related items (using discovery) + // on the pages of the left items you won't be able to find the right item. + if (relationshipType.getTilted() != LEFT && relationshipType.getRightType().equals(itemEntityType)) { + String element = relationshipType.getRightwardType(); + List data = relationshipService + .findByLatestItemAndRelationshipType(context, item, relationshipType, false); + mdvs.addAll(constructLatestForDiscoveryMetadataValues(context, schema, element, qualifier, data)); } } - return null; + + return mdvs; + } + + /** + * Turn the given data into a list of relation.*.latestForDiscovery virtual metadata values. + * @param context the DSpace context. + * @param schema the schema for all metadata values. + * @param element the element for all metadata values. + * @param qualifier the qualifier for all metadata values. + * @param data a POJO containing the item uuid and relationship id. + * @return a list (may be empty) of metadata values of type relation.*.latestForDiscovery. 
+ */ + protected List constructLatestForDiscoveryMetadataValues( + Context context, String schema, String element, String qualifier, List data + ) { + String mdf = new MetadataFieldName(schema, element, qualifier).toString(); + + return data.stream() + .map(datum -> { + RelationshipMetadataValue mdv = constructMetadataValue(context, mdf); + if (mdv == null) { + return null; + } + + mdv.setAuthority(Constants.VIRTUAL_AUTHORITY_PREFIX + datum.getRelationshipId()); + mdv.setValue(datum.getItemUuid().toString()); + // NOTE: place has no meaning for relation.*.latestForDiscovery metadata fields + mdv.setPlace(-1); + mdv.setUseForPlace(false); + + return mdv; + }) + .filter(Objects::nonNull) + .collect(Collectors.toUnmodifiableList()); + } + + @Override + @Deprecated + public String getEntityTypeStringFromMetadata(Item item) { + return itemService.getEntityTypeLabel(item); } @Override diff --git a/dspace-api/src/main/java/org/dspace/content/RelationshipServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/RelationshipServiceImpl.java index 1c99878e81..1fdfde6c74 100644 --- a/dspace-api/src/main/java/org/dspace/content/RelationshipServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/RelationshipServiceImpl.java @@ -10,9 +10,11 @@ package org.dspace.content; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collections; -import java.util.Comparator; +import java.util.HashMap; import java.util.List; import java.util.UUID; +import java.util.stream.Collectors; +import java.util.stream.Stream; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; @@ -20,15 +22,19 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Relationship.LatestVersionStatus; import org.dspace.content.dao.RelationshipDAO; +import 
org.dspace.content.dao.pojo.ItemUuidAndRelationshipId; import org.dspace.content.service.EntityTypeService; import org.dspace.content.service.ItemService; import org.dspace.content.service.RelationshipService; import org.dspace.content.service.RelationshipTypeService; +import org.dspace.content.virtual.VirtualMetadataConfiguration; import org.dspace.content.virtual.VirtualMetadataPopulator; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.services.ConfigurationService; +import org.dspace.versioning.utils.RelationshipVersioningUtils; import org.springframework.beans.factory.annotation.Autowired; public class RelationshipServiceImpl implements RelationshipService { @@ -55,6 +61,10 @@ public class RelationshipServiceImpl implements RelationshipService { @Autowired private RelationshipMetadataService relationshipMetadataService; + + @Autowired + private RelationshipVersioningUtils relationshipVersioningUtils; + @Autowired private VirtualMetadataPopulator virtualMetadataPopulator; @@ -76,9 +86,10 @@ public class RelationshipServiceImpl implements RelationshipService { @Override - public Relationship create(Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, - int leftPlace, int rightPlace, String leftwardValue, String rightwardValue) - throws AuthorizeException, SQLException { + public Relationship create( + Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, int leftPlace, int rightPlace, + String leftwardValue, String rightwardValue, LatestVersionStatus latestVersionStatus + ) throws AuthorizeException, SQLException { Relationship relationship = new Relationship(); relationship.setLeftItem(leftItem); relationship.setRightItem(rightItem); @@ -87,9 +98,21 @@ public class RelationshipServiceImpl implements RelationshipService { relationship.setRightPlace(rightPlace); relationship.setLeftwardValue(leftwardValue); relationship.setRightwardValue(rightwardValue); + 
relationship.setLatestVersionStatus(latestVersionStatus); return create(c, relationship); } + @Override + public Relationship create( + Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, int leftPlace, int rightPlace, + String leftwardValue, String rightwardValue + ) throws AuthorizeException, SQLException { + return create( + c, leftItem, rightItem, relationshipType, leftPlace, rightPlace, leftwardValue, rightwardValue, + LatestVersionStatus.BOTH + ); + } + @Override public Relationship create(Context context, Relationship relationship) throws SQLException, AuthorizeException { if (isRelationshipValidToCreate(context, relationship)) { @@ -98,7 +121,7 @@ public class RelationshipServiceImpl implements RelationshipService { // This order of execution should be handled in the creation (create, updateplace, update relationship) // for a proper place allocation Relationship relationshipToReturn = relationshipDAO.create(context, relationship); - updatePlaceInRelationship(context, relationshipToReturn); + updatePlaceInRelationship(context, relationshipToReturn, null, null, true, true); update(context, relationshipToReturn); updateItemsInRelationship(context, relationship); return relationshipToReturn; @@ -113,71 +136,388 @@ public class RelationshipServiceImpl implements RelationshipService { } @Override - public void updatePlaceInRelationship(Context context, Relationship relationship) - throws SQLException, AuthorizeException { + public Relationship move( + Context context, Relationship relationship, Integer newLeftPlace, Integer newRightPlace + ) throws SQLException, AuthorizeException { + if (authorizeService.authorizeActionBoolean(context, relationship.getLeftItem(), Constants.WRITE) || + authorizeService.authorizeActionBoolean(context, relationship.getRightItem(), Constants.WRITE)) { + + // Don't do anything if neither the leftPlace nor rightPlace was updated + if (newLeftPlace != null || newRightPlace != null) { + // This order of 
execution should be handled in the creation (create, updateplace, update relationship) + // for a proper place allocation + updatePlaceInRelationship(context, relationship, newLeftPlace, newRightPlace, false, false); + update(context, relationship); + updateItemsInRelationship(context, relationship); + } + + return relationship; + } else { + throw new AuthorizeException( + "You do not have write rights on this relationship's items"); + } + } + + @Override + public Relationship move( + Context context, Relationship relationship, Item newLeftItem, Item newRightItem + ) throws SQLException, AuthorizeException { + // If the new Item is the same as the current Item, don't move + newLeftItem = newLeftItem != relationship.getLeftItem() ? newLeftItem : null; + newRightItem = newRightItem != relationship.getRightItem() ? newRightItem : null; + + // Don't do anything if neither the leftItem nor rightItem was updated + if (newLeftItem != null || newRightItem != null) { + // First move the Relationship to the back within the current Item's lists + // This ensures that we won't have any gaps once we move the Relationship to a different Item + move( + context, relationship, + newLeftItem != null ? -1 : null, + newRightItem != null ? -1 : null + ); + + boolean insertLeft = false; + boolean insertRight = false; + + // If Item has been changed, mark the previous Item as modified to make sure we discard the old relation.* + // metadata on the next update. 
+ // Set the Relationship's Items to the new ones, appending to the end + if (newLeftItem != null) { + relationship.getLeftItem().setMetadataModified(); + relationship.setLeftItem(newLeftItem); + relationship.setLeftPlace(-1); + insertLeft = true; + } + if (newRightItem != null) { + relationship.getRightItem().setMetadataModified(); + relationship.setRightItem(newRightItem); + relationship.setRightPlace(-1); + insertRight = true; + } + + // This order of execution should be handled in the creation (create, updateplace, update relationship) + // for a proper place allocation + updatePlaceInRelationship(context, relationship, null, null, insertLeft, insertRight); + update(context, relationship); + updateItemsInRelationship(context, relationship); + } + return relationship; + } + + /** + * This method will update the place for the Relationship and all other relationships found by the items and + * relationship type of the given Relationship. + * + * @param context The relevant DSpace context + * @param relationship The Relationship object that will have its place updated and that will be used + * to retrieve the other relationships whose place might need to be updated. + * @param newLeftPlace If the Relationship in question is to be moved, the leftPlace it is to be moved to. + * Set this to null if the Relationship has not been moved, i.e. it has just been created, + * deleted or when its Items have been modified. + * @param newRightPlace If the Relationship in question is to be moved, the rightPlace it is to be moved to. + * Set this to null if the Relationship has not been moved, i.e. it has just been created, + * deleted or when its Items have been modified. + * @param insertLeft Whether the Relationship in question should be inserted into the left Item. + * Should be set to true when creating or moving to a different Item. + * @param insertRight Whether the Relationship in question should be inserted into the right Item. 
+ * Should be set to true when creating or moving to a different Item. + * @throws SQLException If something goes wrong + * @throws AuthorizeException + * If the user is not authorized to update the Relationship or its Items + */ + private void updatePlaceInRelationship( + Context context, Relationship relationship, + Integer newLeftPlace, Integer newRightPlace, boolean insertLeft, boolean insertRight + ) throws SQLException, AuthorizeException { Item leftItem = relationship.getLeftItem(); - // Max value is used to ensure that these will get added to the back of the list and thus receive the highest - // (last) place as it's set to a -1 for creation - if (relationship.getLeftPlace() == -1) { - relationship.setLeftPlace(Integer.MAX_VALUE); - } Item rightItem = relationship.getRightItem(); - if (relationship.getRightPlace() == -1) { - relationship.setRightPlace(Integer.MAX_VALUE); - } - List leftRelationships = findByItemAndRelationshipType(context, - leftItem, - relationship.getRelationshipType(), true); - List rightRelationships = findByItemAndRelationshipType(context, - rightItem, - relationship.getRelationshipType(), - false); - // These relationships are only deleted from the temporary lists incase they're present in them so that we can + // These lists also include the non-latest. This is relevant to determine whether it's deleted.
+ // This can also imply there may be overlapping places, and/or the given relationship will overlap + // But the shift will allow this, and only happen when needed based on the latest status + List leftRelationships = findByItemAndRelationshipType( + context, leftItem, relationship.getRelationshipType(), true, -1, -1, false + ); + List rightRelationships = findByItemAndRelationshipType( + context, rightItem, relationship.getRelationshipType(), false, -1, -1, false + ); + + // These relationships are only deleted from the temporary lists in case they're present in them so that we can // properly perform our place calculation later down the line in this method. - if (leftRelationships.contains(relationship)) { - leftRelationships.remove(relationship); - } - if (rightRelationships.contains(relationship)) { - rightRelationships.remove(relationship); - } + boolean deletedFromLeft = !leftRelationships.contains(relationship); + boolean deletedFromRight = !rightRelationships.contains(relationship); + leftRelationships.remove(relationship); + rightRelationships.remove(relationship); + + List leftMetadata = getSiblingMetadata(leftItem, relationship, true); + List rightMetadata = getSiblingMetadata(rightItem, relationship, false); + + // For new relationships added to the end, this will be -1. + // For new relationships added at a specific position, this will contain that position. + // For existing relationships, this will contain the place before it was moved. + // For deleted relationships, this will contain the place before it was deleted. 
+ int oldLeftPlace = relationship.getLeftPlace(); + int oldRightPlace = relationship.getRightPlace(); + + + boolean movedUpLeft = resolveRelationshipPlace( + relationship, true, leftRelationships, leftMetadata, oldLeftPlace, newLeftPlace + ); + boolean movedUpRight = resolveRelationshipPlace( + relationship, false, rightRelationships, rightMetadata, oldRightPlace, newRightPlace + ); + context.turnOffAuthorisationSystem(); - //If useForPlace for the leftwardType is false for the relationshipType, - // we need to sort the relationships here based on leftplace. - if (!virtualMetadataPopulator.isUseForPlaceTrueForRelationshipType(relationship.getRelationshipType(), true)) { - if (!leftRelationships.isEmpty()) { - leftRelationships.sort(Comparator.comparingInt(Relationship::getLeftPlace)); - for (int i = 0; i < leftRelationships.size(); i++) { - leftRelationships.get(i).setLeftPlace(i); - } - relationship.setLeftPlace(leftRelationships.size()); - } else { - relationship.setLeftPlace(0); - } - } else { - updateItem(context, leftItem); + //only shift if the place is relevant for the latest relationships + if (relationshipVersioningUtils.otherSideIsLatest(true, relationship.getLatestVersionStatus())) { + shiftSiblings( + relationship, true, oldLeftPlace, movedUpLeft, insertLeft, deletedFromLeft, + leftRelationships, leftMetadata + ); + } + if (relationshipVersioningUtils.otherSideIsLatest(false, relationship.getLatestVersionStatus())) { + shiftSiblings( + relationship, false, oldRightPlace, movedUpRight, insertRight, deletedFromRight, + rightRelationships, rightMetadata + ); } - //If useForPlace for the rightwardType is false for the relationshipType, - // we need to sort the relationships here based on the rightplace. 
- if (!virtualMetadataPopulator.isUseForPlaceTrueForRelationshipType(relationship.getRelationshipType(), false)) { - if (!rightRelationships.isEmpty()) { - rightRelationships.sort(Comparator.comparingInt(Relationship::getRightPlace)); - for (int i = 0; i < rightRelationships.size(); i++) { - rightRelationships.get(i).setRightPlace(i); - } - relationship.setRightPlace(rightRelationships.size()); - } else { - relationship.setRightPlace(0); - } + updateItem(context, leftItem); + updateItem(context, rightItem); - } else { - updateItem(context, rightItem); - - } context.restoreAuthSystemState(); + } + /** + * Return the MDVs in the Item's MDF corresponding to the given Relationship. + * Return an empty list if the Relationship isn't mapped to any MDF + * or if the mapping is configured with useForPlace=false. + * + * This returns actual metadata (not virtual) which is in the same metadata field as the useForPlace. + * For a publication with 2 author relationships and 3 plain text dc.contributor.author values, + * it would return the 3 plain text dc.contributor.author values. + * For a person related to publications, it would return an empty list. 
+ */ + private List getSiblingMetadata( + Item item, Relationship relationship, boolean isLeft + ) { + List metadata = new ArrayList<>(); + if (virtualMetadataPopulator.isUseForPlaceTrueForRelationshipType(relationship.getRelationshipType(), isLeft)) { + HashMap mapping; + if (isLeft) { + mapping = virtualMetadataPopulator.getMap().get(relationship.getRelationshipType().getLeftwardType()); + } else { + mapping = virtualMetadataPopulator.getMap().get(relationship.getRelationshipType().getRightwardType()); + } + if (mapping != null) { + for (String mdf : mapping.keySet()) { + metadata.addAll( + // Make sure we're only looking at database MDVs; if the relationship currently overlaps + // one of these, its virtual MDV will overwrite the database MDV in itemService.getMetadata() + // The relationship pass should be sufficient to move any sibling virtual MDVs. + item.getMetadata() + .stream() + .filter(mdv -> mdv.getMetadataField().toString().equals(mdf.replace(".", "_"))) + .collect(Collectors.toList()) + ); + } + } + } + return metadata; + } + + /** + * Set the left/right place of a Relationship + * - To a new place in case it's being moved + * - Resolve -1 to the actual last place based on the places of its sibling Relationships and/or MDVs + * and determine if it has been moved up in the list. + * + * Examples: + * - Insert a Relationship at place 3 + * newPlace starts out as null and is not updated. Return movedUp=false + * - Insert a Relationship at place -1 + * newPlace starts out as null and is resolved to e.g. 6. Update the Relationship and return movedUp=false + * - Move a Relationship from place 4 to 2 + * Update the Relationship and return movedUp=false. + * - Move a Relationship from place 2 to -1 + * newPlace starts out as -1 and is resolved to e.g. 5. Update the relationship and return movedUp=true. 
+ * - Remove a relationship from place 1 + * Return movedUp=false + * + * @param relationship the Relationship that's being updated + * @param isLeft whether to consider the left side of the Relationship. + * This method should be called twice, once with isLeft=true and once with isLeft=false. + * Make sure this matches the provided relationships/metadata/oldPlace/newPlace. + * @param relationships the list of sibling Relationships + * @param metadata the list of sibling MDVs + * @param oldPlace the previous place for this Relationship, in case it has been moved. + * Otherwise, the current place of a deleted Relationship + * or the place a Relationship has been inserted. + * @param newPlace The new place for this Relationship. Will be null on insert/delete. + * @return true if the Relationship was moved and newPlace > oldPlace + */ + private boolean resolveRelationshipPlace( + Relationship relationship, boolean isLeft, + List relationships, List metadata, + int oldPlace, Integer newPlace + ) { + boolean movedUp = false; + + if (newPlace != null) { + // We're moving an existing Relationship... + if (newPlace == -1) { + // ...to the end of the list + int nextPlace = getNextPlace(relationships, metadata, isLeft); + if (nextPlace == oldPlace) { + // If this Relationship is already at the end, do nothing. + newPlace = oldPlace; + } else { + // Subtract 1 from the next place since we're moving, not inserting and + // the total number of Relationships stays the same. + newPlace = nextPlace - 1; + } + } + if (newPlace > oldPlace) { + // ...up the list. We have to keep track of this in order to shift correctly later on + movedUp = true; + } + } else if (oldPlace == -1) { + // We're _not_ moving an existing Relationship. The newPlace is already set in the Relationship object. + // We only need to resolve it to the end of the list if it's set to -1, otherwise we can just keep it as is. 
+ newPlace = getNextPlace(relationships, metadata, isLeft); + } + + if (newPlace != null) { + setPlace(relationship, isLeft, newPlace); + } + + return movedUp; + } + + /** + * Return the index of the next place in a list of Relationships and Metadata. + * By not relying on the size of both lists we can support one-to-many virtual MDV mappings. + * @param isLeft whether to take the left or right place of each Relationship + */ + private int getNextPlace(List relationships, List metadata, boolean isLeft) { + return Stream.concat( + metadata.stream().map(MetadataValue::getPlace), + relationships.stream().map(r -> getPlace(r, isLeft)) + ).max(Integer::compare) + .map(integer -> integer + 1) + .orElse(0); + } + + /** + * Adjust the left/right place of sibling Relationships and MDVs + * + * Examples: with sibling Relationships R,S,T and metadata a,b,c + * - Insert T at place 1 aRbSc -> a T RbSc + * Shift all siblings with place >= 1 one place to the right + * - Delete R from place 2 aT R bSc -> aTbSc + * Shift all siblings with place > 2 one place to the left + * - Move S from place 3 to place 2 (movedUp=false) aTb S c -> aT S bc + * Shift all siblings with 2 < place <= 3 one place to the right + * - Move T from place 1 to place 3 (movedUp=true) a T Sbc -> aSb T c + * Shift all siblings with 1 < place <= 3 one place to the left + * + * @param relationship the Relationship that's being updated + * @param isLeft whether to consider the left side of the Relationship. + * This method should be called twice, once with isLeft=true and once with isLeft=false. + * Make sure this matches the provided relationships/metadata/oldPlace/newPlace. + * @param oldPlace the previous place for this Relationship, in case it has been moved. + * Otherwise, the current place of a deleted Relationship + * or the place a Relationship has been inserted. + * @param movedUp if this Relationship has been moved up the list, e.g. 
from place 2 to place 4 + * @param deleted whether this Relationship has been deleted + * @param relationships the list of sibling Relationships + * @param metadata the list of sibling MDVs + */ + private void shiftSiblings( + Relationship relationship, boolean isLeft, int oldPlace, boolean movedUp, boolean inserted, boolean deleted, + List relationships, List metadata + ) { + int newPlace = getPlace(relationship, isLeft); + + for (Relationship sibling : relationships) { + // NOTE: If and only if the other side of the relationship has "latest" status, the relationship will appear + // as a metadata value on the item at the current side (indicated by isLeft) of the relationship. + // + // Example: volume <----> issue (LEFT_ONLY) + // => LEFT_ONLY means that the volume has "latest" status, but the issue does NOT have "latest" status + // => the volume will appear in the metadata of the issue, + // but the issue will NOT appear in the metadata of the volume + // + // This means that the other side of the relationship has to have "latest" status, otherwise this + // relationship is NOT relevant for place calculation. + if (relationshipVersioningUtils.otherSideIsLatest(isLeft, sibling.getLatestVersionStatus())) { + int siblingPlace = getPlace(sibling, isLeft); + if ( + (deleted && siblingPlace > newPlace) + // If the relationship was deleted, all relationships after it should shift left + // We must make the distinction between deletes and moves because for inserts oldPlace == newPlace + || (movedUp && siblingPlace <= newPlace && siblingPlace > oldPlace) + // If the relationship was moved up e.g. 
from place 2 to 5, all relationships + with place > 2 (the old place) and <= to 5 should shift left + ) { + setPlace(sibling, isLeft, siblingPlace - 1); + } else if ( + (inserted && siblingPlace >= newPlace) + // If the relationship was inserted, all relationships starting from that place should shift right + // We must make the distinction between inserts and moves because for inserts oldPlace == newPlace + || (!movedUp && siblingPlace >= newPlace && siblingPlace < oldPlace) + // If the relationship was moved down e.g. from place 5 to 2, all relationships + // with place >= 2 and < 5 (the old place) should shift right + ) { + setPlace(sibling, isLeft, siblingPlace + 1); + } + } + } + for (MetadataValue mdv : metadata) { + // NOTE: Plain text metadata values should ALWAYS be included in the place calculation, + // because they are by definition only visible/relevant to the side of the relationship + // that we are currently processing. + int mdvPlace = mdv.getPlace(); + if ( + (deleted && mdvPlace > newPlace) + // If the relationship was deleted, all metadata after it should shift left + // We must make the distinction between deletes and moves because for inserts oldPlace == newPlace + // If the relationship was copied to metadata on deletion: + // - the plain text can be after the relationship (in which case it's moved forward again) + // - or before the relationship (in which case it remains in place) + || (movedUp && mdvPlace <= newPlace && mdvPlace > oldPlace) + // If the relationship was moved up e.g. 
from place 2 to 5, all metadata + // with place > 2 (the old place) and <= to 5 should shift left + ) { + mdv.setPlace(mdvPlace - 1); + } else if ( + (inserted && mdvPlace >= newPlace) + // If the relationship was inserted, all relationships starting from that place should shift right + // We must make the distinction between inserts and moves because for inserts oldPlace == newPlace + || (!movedUp && mdvPlace >= newPlace && mdvPlace < oldPlace) + // If the relationship was moved down e.g. from place 5 to 2, all relationships + // with place >= 2 and < 5 (the old place) should shift right + ) { + mdv.setPlace(mdvPlace + 1); + } + } + } + + private int getPlace(Relationship relationship, boolean isLeft) { + if (isLeft) { + return relationship.getLeftPlace(); + } else { + return relationship.getRightPlace(); + } + } + + private void setPlace(Relationship relationship, boolean isLeft, int place) { + if (isLeft) { + relationship.setLeftPlace(place); + } else { + relationship.setRightPlace(place); + } } @Override @@ -187,16 +527,6 @@ public class RelationshipServiceImpl implements RelationshipService { itemService.update(context, relatedItem); } - @Override - public int findNextLeftPlaceByLeftItem(Context context, Item item) throws SQLException { - return relationshipDAO.findNextLeftPlaceByLeftItem(context, item); - } - - @Override - public int findNextRightPlaceByRightItem(Context context, Item item) throws SQLException { - return relationshipDAO.findNextRightPlaceByRightItem(context, item); - } - private boolean isRelationshipValidToCreate(Context context, Relationship relationship) throws SQLException { RelationshipType relationshipType = relationship.getRelationshipType(); @@ -212,15 +542,19 @@ public class RelationshipServiceImpl implements RelationshipService { logRelationshipTypeDetailsForError(relationshipType); return false; } - if (!verifyMaxCardinality(context, relationship.getLeftItem(), + if 
(!relationship.getLatestVersionStatus().equals(LatestVersionStatus.LEFT_ONLY) + && !verifyMaxCardinality(context, relationship.getLeftItem(), relationshipType.getLeftMaxCardinality(), relationshipType, true)) { + //If RIGHT_ONLY => it's a copied relationship, and the count can be ignored log.warn("The relationship has been deemed invalid since the left item has more" + " relationships than the left max cardinality allows after we'd store this relationship"); logRelationshipTypeDetailsForError(relationshipType); return false; } - if (!verifyMaxCardinality(context, relationship.getRightItem(), + if (!relationship.getLatestVersionStatus().equals(LatestVersionStatus.RIGHT_ONLY) + && !verifyMaxCardinality(context, relationship.getRightItem(), relationshipType.getRightMaxCardinality(), relationshipType, false)) { + //If LEFT_ONLY => it's a copied relationship, and the count can be ignored log.warn("The relationship has been deemed invalid since the right item has more" + " relationships than the right max cardinality allows after we'd store this relationship"); logRelationshipTypeDetailsForError(relationshipType); @@ -279,14 +613,22 @@ public class RelationshipServiceImpl implements RelationshipService { } @Override - public List findByItem(Context context, Item item, Integer limit, Integer offset, - boolean excludeTilted) throws SQLException { + public List findByItem( + Context context, Item item, Integer limit, Integer offset, boolean excludeTilted + ) throws SQLException { + return findByItem(context, item, limit, offset, excludeTilted, true); + } - List list = relationshipDAO.findByItem(context, item, limit, offset, excludeTilted); + @Override + public List findByItem( + Context context, Item item, Integer limit, Integer offset, boolean excludeTilted, boolean excludeNonLatest + ) throws SQLException { + List list = + relationshipDAO.findByItem(context, item, limit, offset, excludeTilted, excludeNonLatest); list.sort((o1, o2) -> { int relationshipType = 
o1.getRelationshipType().getLeftwardType() - .compareTo(o2.getRelationshipType().getLeftwardType()); + .compareTo(o2.getRelationshipType().getLeftwardType()); if (relationshipType != 0) { return relationshipType; } else { @@ -377,7 +719,7 @@ public class RelationshipServiceImpl implements RelationshipService { if (authorizeService.authorizeActionBoolean(context, relationship.getLeftItem(), Constants.WRITE) || authorizeService.authorizeActionBoolean(context, relationship.getRightItem(), Constants.WRITE)) { relationshipDAO.delete(context, relationship); - updatePlaceInRelationship(context, relationship); + updatePlaceInRelationship(context, relationship, null, null, false, false); updateItemsInRelationship(context, relationship); } else { throw new AuthorizeException( @@ -450,7 +792,7 @@ public class RelationshipServiceImpl implements RelationshipService { + item.getID() + " due to " + currentDepth + " depth"); return; } - String entityTypeStringFromMetadata = relationshipMetadataService.getEntityTypeStringFromMetadata(item); + String entityTypeStringFromMetadata = itemService.getEntityTypeLabel(item); EntityType actualEntityType = entityTypeService.findByEntityType(context, entityTypeStringFromMetadata); // Get all types of relations for the current item List relationshipTypes = relationshipTypeService.findByEntityType(context, actualEntityType); @@ -510,6 +852,9 @@ public class RelationshipServiceImpl implements RelationshipService { /** * Converts virtual metadata from RelationshipMetadataValue objects to actual item metadata. + * The resulting MDVs are added in front or behind the Relationship's virtual MDVs. + * The Relationship's virtual MDVs may be shifted right, and all subsequent metadata will be shifted right. + * So this method ensures the places are still valid. 
* * @param context The relevant DSpace context * @param relationship The relationship containing the left and right items @@ -520,13 +865,20 @@ public class RelationshipServiceImpl implements RelationshipService { boolean copyToRightItem) throws SQLException, AuthorizeException { if (copyToLeftItem) { - String entityTypeString = relationshipMetadataService - .getEntityTypeStringFromMetadata(relationship.getLeftItem()); + String entityTypeString = itemService.getEntityTypeLabel(relationship.getLeftItem()); List relationshipMetadataValues = relationshipMetadataService.findRelationshipMetadataValueForItemRelationship(context, relationship.getLeftItem(), entityTypeString, relationship, true); for (RelationshipMetadataValue relationshipMetadataValue : relationshipMetadataValues) { + // This adds the plain text metadata values on the same spot as the virtual values. + // This will be overruled in org.dspace.content.DSpaceObjectServiceImpl.update + // in the line below but it's not important whether the plain text or virtual values end up on top. + // The virtual values will eventually be deleted, and the others shifted + // This is required because addAndShiftRightMetadata has issues on metadata fields containing + // relationship values which are not useForPlace, while the relationship type has useForPlace + // E.g. when using addAndShiftRightMetadata on relation.isAuthorOfPublication, it will break the order + // from dc.contributor.author + itemService.addMetadata(context, relationship.getLeftItem(), relationshipMetadataValue.getMetadataField(). 
getMetadataSchema().getName(), relationshipMetadataValue.getMetadataField().getElement(), @@ -535,16 +887,16 @@ public class RelationshipServiceImpl implements RelationshipService { relationshipMetadataValue.getValue(), null, -1, relationshipMetadataValue.getPlace()); } + //This will ensure the new values no longer overlap, but won't break the order itemService.update(context, relationship.getLeftItem()); } if (copyToRightItem) { - String entityTypeString = relationshipMetadataService - .getEntityTypeStringFromMetadata(relationship.getRightItem()); + String entityTypeString = itemService.getEntityTypeLabel(relationship.getRightItem()); List relationshipMetadataValues = relationshipMetadataService.findRelationshipMetadataValueForItemRelationship(context, relationship.getRightItem(), entityTypeString, relationship, true); for (RelationshipMetadataValue relationshipMetadataValue : relationshipMetadataValues) { - itemService.addAndShiftRightMetadata(context, relationship.getRightItem(), + itemService.addMetadata(context, relationship.getRightItem(), relationshipMetadataValue.getMetadataField(). 
getMetadataSchema().getName(), relationshipMetadataValue.getMetadataField().getElement(), @@ -638,22 +990,46 @@ public class RelationshipServiceImpl implements RelationshipService { public List findByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType) throws SQLException { - return relationshipDAO.findByItemAndRelationshipType(context, item, relationshipType, -1, -1); + return findByItemAndRelationshipType(context, item, relationshipType, -1, -1, true); } @Override public List findByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, int limit, int offset) throws SQLException { - return relationshipDAO.findByItemAndRelationshipType(context, item, relationshipType, limit, offset); + return findByItemAndRelationshipType(context, item, relationshipType, limit, offset, true); } @Override - public List findByItemAndRelationshipType(Context context, Item item, - RelationshipType relationshipType, boolean isLeft, - int limit, int offset) - throws SQLException { - return relationshipDAO.findByItemAndRelationshipType(context, item, relationshipType, isLeft, limit, offset); + public List findByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, int limit, int offset, boolean excludeNonLatest + ) throws SQLException { + return relationshipDAO + .findByItemAndRelationshipType(context, item, relationshipType, limit, offset, excludeNonLatest); + } + + @Override + public List findByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, boolean isLeft, int limit, int offset + ) throws SQLException { + return findByItemAndRelationshipType(context, item, relationshipType, isLeft, limit, offset, true); + } + + @Override + public List findByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, boolean isLeft, int limit, int offset, + boolean excludeNonLatest + ) throws SQLException { + return relationshipDAO 
+ .findByItemAndRelationshipType(context, item, relationshipType, isLeft, limit, offset, excludeNonLatest); + } + + @Override + public List findByLatestItemAndRelationshipType( + Context context, Item latestItem, RelationshipType relationshipType, boolean isLeft + ) throws SQLException { + return relationshipDAO + .findByLatestItemAndRelationshipType(context, latestItem, relationshipType, isLeft); } @Override @@ -690,7 +1066,14 @@ public class RelationshipServiceImpl implements RelationshipService { @Override public int countByItem(Context context, Item item) throws SQLException { - return relationshipDAO.countByItem(context, item); + return countByItem(context, item, false, true); + } + + @Override + public int countByItem( + Context context, Item item, boolean excludeTilted, boolean excludeNonLatest + ) throws SQLException { + return relationshipDAO.countByItem(context, item, excludeTilted, excludeNonLatest); } @Override @@ -699,9 +1082,18 @@ public class RelationshipServiceImpl implements RelationshipService { } @Override - public int countByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, - boolean isLeft) throws SQLException { - return relationshipDAO.countByItemAndRelationshipType(context, item, relationshipType, isLeft); + public int countByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, boolean isLeft + ) throws SQLException { + return countByItemAndRelationshipType(context, item, relationshipType, isLeft, true); + } + + @Override + public int countByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, boolean isLeft, boolean excludeNonLatest + ) throws SQLException { + return relationshipDAO + .countByItemAndRelationshipType(context, item, relationshipType, isLeft, excludeNonLatest); } @Override diff --git a/dspace-api/src/main/java/org/dspace/content/WorkspaceItemServiceImpl.java 
b/dspace-api/src/main/java/org/dspace/content/WorkspaceItemServiceImpl.java index d891dcf638..f40bb5256f 100644 --- a/dspace-api/src/main/java/org/dspace/content/WorkspaceItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/WorkspaceItemServiceImpl.java @@ -128,19 +128,23 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService { Optional colEntityType = getDSpaceEntityType(collection); Optional templateItemEntityType = getDSpaceEntityType(templateItem); - if (colEntityType.isPresent() && templateItemEntityType.isPresent() && + if (template && colEntityType.isPresent() && templateItemEntityType.isPresent() && !StringUtils.equals(colEntityType.get().getValue(), templateItemEntityType.get().getValue())) { throw new IllegalStateException("The template item has entity type : (" + templateItemEntityType.get().getValue() + ") different than collection entity type : " + colEntityType.get().getValue()); } - if (colEntityType.isPresent() && templateItemEntityType.isEmpty()) { + if (template && colEntityType.isPresent() && templateItemEntityType.isEmpty()) { MetadataValue original = colEntityType.get(); MetadataField metadataField = original.getMetadataField(); MetadataSchema metadataSchema = metadataField.getMetadataSchema(); - itemService.addMetadata(context, item, metadataSchema.getName(), metadataField.getElement(), - metadataField.getQualifier(), original.getLanguage(), original.getValue()); + // NOTE: dspace.entity.type = does not make sense + // the collection entity type is by default blank when a collection is first created + if (StringUtils.isNotBlank(original.getValue())) { + itemService.addMetadata(context, item, metadataSchema.getName(), metadataField.getElement(), + metadataField.getQualifier(), original.getLanguage(), original.getValue()); + } } if (template && (templateItem != null)) { diff --git a/dspace-api/src/main/java/org/dspace/content/authority/EPersonAuthority.java 
b/dspace-api/src/main/java/org/dspace/content/authority/EPersonAuthority.java new file mode 100644 index 0000000000..8d929a8d3b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/authority/EPersonAuthority.java @@ -0,0 +1,127 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.authority; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.UUID; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; +import org.dspace.util.UUIDUtils; +import org.dspace.web.ContextUtil; + +/** + * Implementation of {@link ChoiceAuthority} based on EPerson. Allows you to set + * the id of an eperson as authority. 
+ * + * @author Mykhaylo Boychuk (4science.it) + */ +public class EPersonAuthority implements ChoiceAuthority { + + private static final Logger log = LogManager.getLogger(EPersonAuthority.class); + + /** + * the name assigned to the specific instance by the PluginService, @see + * {@link NameAwarePlugin} + **/ + private String authorityName; + + private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); + + private AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); + + @Override + public Choices getBestMatch(String text, String locale) { + return getMatches(text, 0, 2, locale); + } + + @Override + public Choices getMatches(String text, int start, int limit, String locale) { + if (limit <= 0) { + limit = 20; + } + + Context context = getContext(); + + List ePersons = searchEPersons(context, text, start, limit); + + List choiceList = new ArrayList(); + for (EPerson eperson : ePersons) { + choiceList.add(new Choice(eperson.getID().toString(), eperson.getFullName(), eperson.getFullName())); + } + Choice[] results = new Choice[choiceList.size()]; + results = choiceList.toArray(results); + return new Choices(results, start, ePersons.size(), Choices.CF_AMBIGUOUS, ePersons.size() > (start + limit), 0); + } + + @Override + public String getLabel(String key, String locale) { + + UUID uuid = UUIDUtils.fromString(key); + if (uuid == null) { + return null; + } + + Context context = getContext(); + try { + EPerson ePerson = ePersonService.find(context, uuid); + return ePerson != null ? 
ePerson.getFullName() : null; + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); + } + + } + + private List searchEPersons(Context context, String text, int start, int limit) { + + if (!isCurrentUserAdminOrAccessGroupManager(context)) { + return Collections.emptyList(); + } + + try { + return ePersonService.search(context, text, start, limit); + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); + } + + } + + private Context getContext() { + Context context = ContextUtil.obtainCurrentRequestContext(); + return context != null ? context : new Context(); + } + + private boolean isCurrentUserAdminOrAccessGroupManager(Context context) { + try { + return authorizeService.isAdmin(context) || authorizeService.isAccountManager(context); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @Override + public String getPluginInstanceName() { + return authorityName; + } + + @Override + public void setPluginInstanceName(String name) { + this.authorityName = name; + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/AIPDIMCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/AIPDIMCrosswalk.java index 2d919baa9d..4b77e4807a 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/AIPDIMCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/AIPDIMCrosswalk.java @@ -14,8 +14,8 @@ import java.util.List; import org.dspace.authorize.AuthorizeException; import org.dspace.content.DSpaceObject; import org.dspace.core.Context; -import org.jdom.Element; -import org.jdom.Namespace; +import org.jdom2.Element; +import org.jdom2.Namespace; /** * Crosswalk descriptive metadata to and from DIM (DSpace Intermediate diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/AIPTechMDCrosswalk.java 
b/dspace-api/src/main/java/org/dspace/content/crosswalk/AIPTechMDCrosswalk.java index 8ffddf715f..978cabfb4b 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/AIPTechMDCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/AIPTechMDCrosswalk.java @@ -40,8 +40,8 @@ import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.handle.service.HandleService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Element; -import org.jdom.Namespace; +import org.jdom2.Element; +import org.jdom2.Namespace; /** * Crosswalk of technical metadata for DSpace AIP. This is diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/DIMDisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/DIMDisseminationCrosswalk.java index 3f4d6bd44e..4365d9a485 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/DIMDisseminationCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/DIMDisseminationCrosswalk.java @@ -23,8 +23,8 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.jdom.Element; -import org.jdom.Namespace; +import org.jdom2.Element; +import org.jdom2.Namespace; /** * DIM dissemination crosswalk diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/DIMIngestionCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/DIMIngestionCrosswalk.java index ad922a65f2..4217308e65 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/DIMIngestionCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/DIMIngestionCrosswalk.java @@ -19,8 +19,8 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; 
-import org.jdom.Element; -import org.jdom.Namespace; +import org.jdom2.Element; +import org.jdom2.Namespace; /** * DIM ingestion crosswalk diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/DisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/DisseminationCrosswalk.java index 23e1965d7b..3e4fe21f8f 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/DisseminationCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/DisseminationCrosswalk.java @@ -14,8 +14,8 @@ import java.util.List; import org.dspace.authorize.AuthorizeException; import org.dspace.content.DSpaceObject; import org.dspace.core.Context; -import org.jdom.Element; -import org.jdom.Namespace; +import org.jdom2.Element; +import org.jdom2.Namespace; /** * Dissemination Crosswalk plugin -- translate DSpace native diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/IngestionCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/IngestionCrosswalk.java index 7edfb6f79f..bb73c83c45 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/IngestionCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/IngestionCrosswalk.java @@ -14,7 +14,7 @@ import java.util.List; import org.dspace.authorize.AuthorizeException; import org.dspace.content.DSpaceObject; import org.dspace.core.Context; -import org.jdom.Element; +import org.jdom2.Element; /** * Ingestion Crosswalk plugin -- translate an external metadata format diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/METSDisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/METSDisseminationCrosswalk.java index e44774a672..b8a4a8aef3 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/METSDisseminationCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/METSDisseminationCrosswalk.java @@ -24,11 +24,11 @@ import org.dspace.core.Context; import 
org.dspace.core.factory.CoreServiceFactory; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.Namespace; -import org.jdom.input.SAXBuilder; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.Namespace; +import org.jdom2.input.SAXBuilder; /** * METS dissemination crosswalk diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/METSRightsCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/METSRightsCrosswalk.java index 559d463be2..7f6622841b 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/METSRightsCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/METSRightsCrosswalk.java @@ -35,8 +35,8 @@ import org.dspace.eperson.Group; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.GroupService; -import org.jdom.Element; -import org.jdom.Namespace; +import org.jdom2.Element; +import org.jdom2.Namespace; /** * METSRights Ingestion and Dissemination Crosswalk diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/MODSDisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/MODSDisseminationCrosswalk.java index 182fcebe2f..1e63be5ba1 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/MODSDisseminationCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/MODSDisseminationCrosswalk.java @@ -15,7 +15,6 @@ import java.sql.SQLException; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; @@ -42,16 +41,18 @@ import org.dspace.handle.factory.HandleServiceFactory; import 
org.dspace.handle.service.HandleService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Attribute; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.Namespace; -import org.jdom.Text; -import org.jdom.Verifier; -import org.jdom.input.SAXBuilder; -import org.jdom.output.XMLOutputter; -import org.jdom.xpath.XPath; +import org.jdom2.Attribute; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.Namespace; +import org.jdom2.Text; +import org.jdom2.Verifier; +import org.jdom2.filter.Filters; +import org.jdom2.input.SAXBuilder; +import org.jdom2.output.XMLOutputter; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; /** * Configurable MODS Crosswalk @@ -156,7 +157,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin static class modsTriple { public String qdc = null; public Element xml = null; - public XPath xpath = null; + public XPathExpression xpath = null; /** * Initialize from text versions of QDC, XML and XPath. 
@@ -171,9 +172,9 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin final String postlog = ""; try { result.qdc = qdc; - result.xpath = XPath.newInstance(xpath); - result.xpath.addNamespace(MODS_NS.getPrefix(), MODS_NS.getURI()); - result.xpath.addNamespace(XLINK_NS); + result.xpath = + XPathFactory.instance() + .compile(xpath, Filters.fpassthrough(), null, MODS_NS, XLINK_NS); Document d = builder.build(new StringReader(prolog + xml + postlog)); result.xml = (Element) d.getRootElement().getContent(0); } catch (JDOMException | IOException je) { @@ -295,6 +296,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin * @throws IOException if IO error * @throws SQLException if database error * @throws AuthorizeException if authorization error + * @return List of Elements */ @Override public List disseminateList(Context context, DSpaceObject dso) @@ -352,37 +354,29 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin if (trip == null) { log.warn("WARNING: " + getPluginInstanceName() + ": No MODS mapping for \"" + qdc + "\""); } else { - try { - Element me = (Element) trip.xml.clone(); - if (addSchema) { - me.setAttribute("schemaLocation", schemaLocation, XSI_NS); - } - Iterator ni = trip.xpath.selectNodes(me).iterator(); - if (!ni.hasNext()) { - log.warn("XPath \"" + trip.xpath.getXPath() + - "\" found no elements in \"" + - outputUgly.outputString(me) + - "\", qdc=" + qdc); - } - while (ni.hasNext()) { - Object what = ni.next(); - if (what instanceof Element) { - ((Element) what).setText(checkedString(value)); - } else if (what instanceof Attribute) { - ((Attribute) what).setValue(checkedString(value)); - } else if (what instanceof Text) { - ((Text) what).setText(checkedString(value)); - } else { - log.warn("Got unknown object from XPath, class=" + what.getClass().getName()); - } - } - result.add(me); - } catch (JDOMException je) { - log.error("Error following XPath in modsTriple: context=" + - outputUgly.outputString(trip.xml) 
+ - ", xpath=" + trip.xpath.getXPath() + ", exception=" + - je.toString()); + Element me = (Element) trip.xml.clone(); + if (addSchema) { + me.setAttribute("schemaLocation", schemaLocation, XSI_NS); } + List matches = trip.xpath.evaluate(me); + if (matches.isEmpty()) { + log.warn("XPath \"" + trip.xpath.getExpression() + + "\" found no elements in \"" + + outputUgly.outputString(me) + + "\", qdc=" + qdc); + } + for (Object match: matches) { + if (match instanceof Element) { + ((Element) match).setText(checkedString(value)); + } else if (match instanceof Attribute) { + ((Attribute) match).setValue(checkedString(value)); + } else if (match instanceof Text) { + ((Text) match).setText(checkedString(value)); + } else { + log.warn("Got unknown object from XPath, class=" + match.getClass().getName()); + } + } + result.add(me); } } return result; @@ -423,9 +417,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin String title = site.getName(); String url = site.getURL(); - if (identifier_uri != null) { - metadata.add(createDCValue("identifier.uri", null, identifier_uri)); - } + metadata.add(createDCValue("identifier.uri", null, identifier_uri)); //FIXME: adding two URIs for now (site handle and URL), in case site isn't using handles if (url != null) { @@ -472,9 +464,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin metadata.add(createDCValue("description", "tableofcontents", description_table)); } - if (identifier_uri != null) { - metadata.add(createDCValue("identifier.uri", null, identifier_uri)); - } + metadata.add(createDCValue("identifier.uri", null, identifier_uri)); if (rights != null) { metadata.add(createDCValue("rights", null, rights)); @@ -526,9 +516,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin metadata.add(createDCValue("description", "tableofcontents", description_table)); } - if (identifier_uri != null) { - metadata.add(createDCValue("identifier", "uri", identifier_uri)); - } + 
metadata.add(createDCValue("identifier", "uri", identifier_uri)); if (provenance != null) { metadata.add(createDCValue("provenance", null, provenance)); diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/NullIngestionCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/NullIngestionCrosswalk.java index 994e15601d..562dadaca0 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/NullIngestionCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/NullIngestionCrosswalk.java @@ -15,9 +15,9 @@ import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.content.DSpaceObject; import org.dspace.core.Context; -import org.jdom.Element; -import org.jdom.output.Format; -import org.jdom.output.XMLOutputter; +import org.jdom2.Element; +import org.jdom2.output.Format; +import org.jdom2.output.XMLOutputter; /** * "Null" ingestion crosswalk diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/OAIDCIngestionCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/OAIDCIngestionCrosswalk.java index 10bd5ce6fa..6b0ecae780 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/OAIDCIngestionCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/OAIDCIngestionCrosswalk.java @@ -20,8 +20,8 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.jdom.Element; -import org.jdom.Namespace; +import org.jdom2.Element; +import org.jdom2.Namespace; /** * DIM ingestion crosswalk diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/OREDisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/OREDisseminationCrosswalk.java index 3dde093784..ac1c434322 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/OREDisseminationCrosswalk.java +++ 
b/dspace-api/src/main/java/org/dspace/content/crosswalk/OREDisseminationCrosswalk.java @@ -31,8 +31,8 @@ import org.dspace.core.Context; import org.dspace.core.Utils; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Element; -import org.jdom.Namespace; +import org.jdom2.Element; +import org.jdom2.Namespace; /** * ORE dissemination crosswalk diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/OREIngestionCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/OREIngestionCrosswalk.java index 80c424e782..f756aae225 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/OREIngestionCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/OREIngestionCrosswalk.java @@ -34,12 +34,13 @@ import org.dspace.content.service.BundleService; import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.jdom.Attribute; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.Namespace; -import org.jdom.xpath.XPath; +import org.jdom2.Attribute; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; /** * ORE ingestion crosswalk @@ -113,23 +114,21 @@ public class OREIngestionCrosswalk Document doc = new Document(); doc.addContent(root.detach()); - XPath xpathLinks; List aggregatedResources; String entryId; - try { - xpathLinks = XPath.newInstance("/atom:entry/atom:link[@rel=\"" + ORE_NS.getURI() + "aggregates" + "\"]"); - xpathLinks.addNamespace(ATOM_NS); - aggregatedResources = xpathLinks.selectNodes(doc); + XPathExpression xpathLinks = + XPathFactory.instance() + .compile("/atom:entry/atom:link[@rel=\"" + ORE_NS.getURI() + "aggregates" + "\"]", + Filters.element(), null, ATOM_NS); + 
aggregatedResources = xpathLinks.evaluate(doc); - xpathLinks = XPath.newInstance("/atom:entry/atom:link[@rel='alternate']/@href"); - xpathLinks.addNamespace(ATOM_NS); - entryId = ((Attribute) xpathLinks.selectSingleNode(doc)).getValue(); - } catch (JDOMException e) { - throw new CrosswalkException("JDOM exception occurred while ingesting the ORE", e); - } + XPathExpression xpathAltHref = + XPathFactory.instance() + .compile("/atom:entry/atom:link[@rel='alternate']/@href", + Filters.attribute(), null, ATOM_NS); + entryId = xpathAltHref.evaluateFirst(doc).getValue(); // Next for each resource, create a bitstream - XPath xpathDesc; NumberFormat nf = NumberFormat.getInstance(); nf.setGroupingUsed(false); nf.setMinimumIntegerDigits(4); @@ -140,16 +139,12 @@ public class OREIngestionCrosswalk String bundleName; Element desc = null; - try { - xpathDesc = XPath.newInstance( - "/atom:entry/oreatom:triples/rdf:Description[@rdf:about=\"" + this.encodeForURL(href) + "\"][1]"); - xpathDesc.addNamespace(ATOM_NS); - xpathDesc.addNamespace(ORE_ATOM); - xpathDesc.addNamespace(RDF_NS); - desc = (Element) xpathDesc.selectSingleNode(doc); - } catch (JDOMException e) { - log.warn("Could not find description for {}", href, e); - } + XPathExpression xpathDesc = + XPathFactory.instance() + .compile("/atom:entry/oreatom:triples/rdf:Description[@rdf:about=\"" + + this.encodeForURL(href) + "\"][1]", + Filters.element(), null, ATOM_NS, ORE_ATOM, RDF_NS); + desc = xpathDesc.evaluateFirst(doc); if (desc != null && desc.getChild("type", RDF_NS).getAttributeValue("resource", RDF_NS) .equals(DS_NS.getURI() + "DSpaceBitstream")) { diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/PREMISCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/PREMISCrosswalk.java index e4e387a3ec..39b6c8f29c 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/PREMISCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/PREMISCrosswalk.java @@ -30,8 
+30,8 @@ import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Element; -import org.jdom.Namespace; +import org.jdom2.Element; +import org.jdom2.Namespace; /** * PREMIS Crosswalk diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/ParameterizedDisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/ParameterizedDisseminationCrosswalk.java index 312aed3543..5d9322339d 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/ParameterizedDisseminationCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/ParameterizedDisseminationCrosswalk.java @@ -14,7 +14,7 @@ import java.util.Map; import org.dspace.authorize.AuthorizeException; import org.dspace.content.DSpaceObject; import org.dspace.core.Context; -import org.jdom.Element; +import org.jdom2.Element; /** * Translate DSpace native metadata into an external XML format, with parameters. 
diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/QDCCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/QDCCrosswalk.java index f3c51a5d46..2fdbaaad00 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/QDCCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/QDCCrosswalk.java @@ -36,10 +36,10 @@ import org.dspace.core.Context; import org.dspace.core.SelfNamedPlugin; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.Namespace; -import org.jdom.input.SAXBuilder; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.input.SAXBuilder; /** * Configurable QDC Crosswalk @@ -290,7 +290,7 @@ public class QDCCrosswalk extends SelfNamedPlugin qdc2element.put(qdc, element); element2qdc.put(makeQualifiedTagName(element), qdc); log.debug("Building Maps: qdc=\"" + qdc + "\", element=\"" + element.toString() + "\""); - } catch (org.jdom.JDOMException je) { + } catch (org.jdom2.JDOMException je) { throw new CrosswalkInternalException( "Failed parsing XML fragment in properties file: \"" + prolog + val + postlog + "\": " + je .toString(), je); @@ -326,6 +326,7 @@ public class QDCCrosswalk extends SelfNamedPlugin * @throws IOException if IO error * @throws SQLException if database error * @throws AuthorizeException if authorization error + * @return List of Elements */ @Override public List disseminateList(Context context, DSpaceObject dso) diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/RoleCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/RoleCrosswalk.java index d36ff3edf5..2c763036ce 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/RoleCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/RoleCrosswalk.java @@ -26,12 +26,12 @@ import 
org.dspace.core.factory.CoreServiceFactory; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.workflow.WorkflowException; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.Namespace; -import org.jdom.input.SAXBuilder; -import org.jdom.output.XMLOutputter; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.Namespace; +import org.jdom2.input.SAXBuilder; +import org.jdom2.output.XMLOutputter; /** * Role Crosswalk diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/SimpleDCDisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/SimpleDCDisseminationCrosswalk.java index 22ec68070a..2f91c3aa07 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/SimpleDCDisseminationCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/SimpleDCDisseminationCrosswalk.java @@ -24,8 +24,8 @@ import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.SelfNamedPlugin; -import org.jdom.Element; -import org.jdom.Namespace; +import org.jdom2.Element; +import org.jdom2.Namespace; /** * Disseminator for Simple Dublin Core metadata in XML format. 
@@ -84,6 +84,7 @@ public class SimpleDCDisseminationCrosswalk extends SelfNamedPlugin * @throws IOException if IO error * @throws SQLException if database error * @throws AuthorizeException if authorization error + * @return List of Elements */ @Override public List disseminateList(Context context, DSpaceObject dso) diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/XHTMLHeadDisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/XHTMLHeadDisseminationCrosswalk.java index d03d2dd887..7b25f69ce3 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/XHTMLHeadDisseminationCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/XHTMLHeadDisseminationCrosswalk.java @@ -34,9 +34,9 @@ import org.dspace.core.Context; import org.dspace.core.SelfNamedPlugin; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Element; -import org.jdom.Namespace; -import org.jdom.Verifier; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.Verifier; /** * Crosswalk for creating appropriate <meta> elements to appear in the @@ -90,17 +90,17 @@ public class XHTMLHeadDisseminationCrosswalk * Maps DSpace metadata field to name to use in XHTML head element, e.g. 
* dc.creator or dc.description.abstract */ - private Map names; + private final Map names; /** * Maps DSpace metadata field to scheme for that field, if any */ - private Map schemes; + private final Map schemes; /** * Schemas to add -- maps schema.NAME to schema URL */ - private Map schemaURLs; + private final Map schemaURLs; public XHTMLHeadDisseminationCrosswalk() throws IOException { names = new HashMap<>(); @@ -109,17 +109,9 @@ public class XHTMLHeadDisseminationCrosswalk // Read in configuration Properties crosswalkProps = new Properties(); - FileInputStream fis = new FileInputStream(config); - try { + + try (FileInputStream fis = new FileInputStream(config);) { crosswalkProps.load(fis); - } finally { - if (fis != null) { - try { - fis.close(); - } catch (IOException ioe) { - // ignore - } - } } Enumeration e = crosswalkProps.keys(); @@ -178,6 +170,7 @@ public class XHTMLHeadDisseminationCrosswalk * @throws IOException if IO error * @throws SQLException if database error * @throws AuthorizeException if authorization error + * @return List of Elements */ @Override public List disseminateList(Context context, DSpaceObject dso) throws CrosswalkException, diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTCrosswalk.java index 1c85fd82c5..d4ccebf82e 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTCrosswalk.java @@ -21,7 +21,7 @@ import javax.xml.transform.stream.StreamSource; import org.dspace.core.SelfNamedPlugin; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Namespace; +import org.jdom2.Namespace; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -130,12 +130,6 @@ public abstract class XSLTCrosswalk extends SelfNamedPlugin { return aliasList.toArray(new String[aliasList.size()]); } - 
/** - * We need to force this, because some dependency elsewhere interferes. - */ - private static final String TRANSFORMER_FACTORY_CLASS - = "com.sun.org.apache.xalan.internal.xsltc.trax.TransformerFactoryImpl"; - private Transformer transformer = null; private File transformFile = null; private long transformLastModified = 0; @@ -181,8 +175,7 @@ public abstract class XSLTCrosswalk extends SelfNamedPlugin { Source transformSource = new StreamSource(new FileInputStream(transformFile)); TransformerFactory transformerFactory - = TransformerFactory.newInstance( - TRANSFORMER_FACTORY_CLASS, null); + = TransformerFactory.newInstance(); transformer = transformerFactory.newTransformer(transformSource); transformLastModified = transformFile.lastModified(); } catch (TransformerConfigurationException | FileNotFoundException e) { diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTDisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTDisseminationCrosswalk.java index 6c30c1b1a4..26371b46aa 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTDisseminationCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTDisseminationCrosswalk.java @@ -18,6 +18,7 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerException; @@ -41,14 +42,15 @@ import org.dspace.core.factory.CoreServiceFactory; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.Namespace; -import org.jdom.Verifier; -import org.jdom.output.Format; -import org.jdom.output.XMLOutputter; -import org.jdom.transform.JDOMResult; -import org.jdom.transform.JDOMSource; +import org.jdom2.Content; 
+import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.Verifier; +import org.jdom2.output.Format; +import org.jdom2.output.XMLOutputter; +import org.jdom2.transform.JDOMResult; +import org.jdom2.transform.JDOMSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -244,6 +246,7 @@ public class XSLTDisseminationCrosswalk * @throws SQLException if database error * @throws AuthorizeException if authorization error * @see DisseminationCrosswalk + * @return List of Elements */ @Override public List disseminateList(Context context, DSpaceObject dso) @@ -268,7 +271,12 @@ public class XSLTDisseminationCrosswalk try { JDOMResult result = new JDOMResult(); xform.transform(new JDOMSource(createDIM(dso).getChildren()), result); - return result.getResult(); + List contentList = result.getResult(); + // Transform List into List + List elementList = contentList.stream() + .filter(obj -> obj instanceof Element) + .map(Element.class::cast).collect(Collectors.toList()); + return elementList; } catch (TransformerException e) { LOG.error("Got error: " + e.toString()); throw new CrosswalkInternalException("XSL translation failed: " + e.toString(), e); diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTIngestionCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTIngestionCrosswalk.java index 37a822374d..63ef5f7336 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTIngestionCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTIngestionCrosswalk.java @@ -12,6 +12,7 @@ import java.io.IOException; import java.sql.SQLException; import java.util.Iterator; import java.util.List; +import java.util.stream.Collectors; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerException; @@ -34,13 +35,14 @@ import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; import 
org.dspace.core.factory.CoreServiceFactory; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.input.SAXBuilder; -import org.jdom.output.Format; -import org.jdom.output.XMLOutputter; -import org.jdom.transform.JDOMResult; -import org.jdom.transform.JDOMSource; +import org.jdom2.Content; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.input.SAXBuilder; +import org.jdom2.output.Format; +import org.jdom2.output.XMLOutputter; +import org.jdom2.transform.JDOMResult; +import org.jdom2.transform.JDOMSource; /** * Configurable XSLT-driven ingestion Crosswalk @@ -141,7 +143,12 @@ public class XSLTIngestionCrosswalk try { JDOMResult result = new JDOMResult(); xform.transform(new JDOMSource(metadata), result); - ingestDIM(context, dso, result.getResult(), createMissingMetadataFields); + List contentList = result.getResult(); + // Transform List into List + List elementList = contentList.stream() + .filter(obj -> obj instanceof Element) + .map(Element.class::cast).collect(Collectors.toList()); + ingestDIM(context, dso, elementList, createMissingMetadataFields); } catch (TransformerException e) { log.error("Got error: " + e.toString()); throw new CrosswalkInternalException("XSL Transformation failed: " + e.toString(), e); diff --git a/dspace-api/src/main/java/org/dspace/content/dao/ItemDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/ItemDAO.java index 4c391d973b..86da51e6cc 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/ItemDAO.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/ItemDAO.java @@ -32,8 +32,22 @@ public interface ItemDAO extends DSpaceObjectLegacySupportDAO { public Iterator findAll(Context context, boolean archived, int limit, int offset) throws SQLException; + @Deprecated public Iterator findAll(Context context, boolean archived, boolean withdrawn) throws SQLException; + /** + * Find all items that are: + * - NOT in the workspace + * - NOT in the workflow + * - NOT a template item for 
e.g. a collection + * + * This implies that the result also contains older versions of items and withdrawn items. + * @param context the DSpace context. + * @return iterator over all regular items. + * @throws SQLException if database error. + */ + public Iterator findAllRegularItems(Context context) throws SQLException; + /** * Find all Items modified since a Date. * diff --git a/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java index 4ef26cffcb..69bac319c6 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java @@ -8,8 +8,10 @@ package org.dspace.content.dao; import java.sql.SQLException; +import java.util.Date; import java.util.List; +import org.dspace.content.ProcessStatus; import org.dspace.core.Context; import org.dspace.core.GenericDAO; import org.dspace.scripts.Process; @@ -81,4 +83,18 @@ public interface ProcessDAO extends GenericDAO { int countTotalWithParameters(Context context, ProcessQueryParameterContainer processQueryParameterContainer) throws SQLException; + + /** + * Find all the processes with one of the given status and with a creation time + * older than the specified date. 
+ * + * @param context The relevant DSpace context + * @param statuses the statuses of the processes to search for + * @param date the creation date to search for + * @return The list of all Processes which match requirements + * @throws SQLException If something goes wrong + */ + List findByStatusAndCreationTimeOlderThan(Context context, List statuses, Date date) + throws SQLException; + } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/RelationshipDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/RelationshipDAO.java index 57b950a36b..a152b5b902 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/RelationshipDAO.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/RelationshipDAO.java @@ -14,6 +14,7 @@ import java.util.UUID; import org.dspace.content.Item; import org.dspace.content.Relationship; import org.dspace.content.RelationshipType; +import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId; import org.dspace.core.Context; import org.dspace.core.GenericDAO; @@ -28,53 +29,38 @@ public interface RelationshipDAO extends GenericDAO { /** * This method returns a list of Relationship objects that have the given Item object * as a leftItem or a rightItem - * @param context The relevant DSpace context - * @param item The item that should be either a leftItem or a rightItem of all - * the Relationship objects in the returned list - * @param excludeTilted If true, excludes tilted relationships - * @return The list of Relationship objects that contain either a left or a - * right item that is equal to the given item - * @throws SQLException If something goes wrong + * @param context The relevant DSpace context + * @param item The item that should be either a leftItem or a rightItem of all + * the Relationship objects in the returned list + * @param excludeTilted If true, excludes tilted relationships + * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version + * that is 
relevant for this relationship + * @return The list of Relationship objects that contain either a left or a + * right item that is equal to the given item + * @throws SQLException If something goes wrong */ - List findByItem(Context context, Item item, boolean excludeTilted) throws SQLException; + List findByItem( + Context context, Item item, boolean excludeTilted, boolean excludeNonLatest + ) throws SQLException; /** * This method returns a list of Relationship objects that have the given Item object * as a leftItem or a rightItem - * @param context The relevant DSpace context - * @param item The item that should be either a leftItem or a rightItem of all - * the Relationship objects in the returned list - * @param limit paging limit - * @param offset paging offset - * @param excludeTilted If true, excludes tilted relationships - * @return The list of Relationship objects that contain either a left or a - * right item that is equal to the given item - * @throws SQLException If something goes wrong + * @param context The relevant DSpace context + * @param item The item that should be either a leftItem or a rightItem of all + * the Relationship objects in the returned list + * @param limit paging limit + * @param offset paging offset + * @param excludeTilted If true, excludes tilted relationships + * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version + * that is relevant for this relationship + * @return The list of Relationship objects that contain either a left or a + * right item that is equal to the given item + * @throws SQLException If something goes wrong */ - List findByItem(Context context, Item item, Integer limit, Integer offset, boolean excludeTilted) - throws SQLException; - - /** - * This method returns the next leftplace integer to use for a relationship with this item as the leftItem - * - * @param context The relevant DSpace context - * @param item The item to be matched on leftItem - * 
@return The next integer to be used for the leftplace of a relationship with the given item - * as a left item - * @throws SQLException If something goes wrong - */ - int findNextLeftPlaceByLeftItem(Context context, Item item) throws SQLException; - - /** - * This method returns the next rightplace integer to use for a relationship with this item as the rightItem - * - * @param context The relevant DSpace context - * @param item The item to be matched on rightItem - * @return The next integer to be used for the rightplace of a relationship with the given item - * as a right item - * @throws SQLException If something goes wrong - */ - int findNextRightPlaceByRightItem(Context context, Item item) throws SQLException; + List findByItem( + Context context, Item item, Integer limit, Integer offset, boolean excludeTilted, boolean excludeNonLatest + ) throws SQLException; /** * This method returns a list of Relationship objects for the given RelationshipType object. @@ -108,34 +94,69 @@ public interface RelationshipDAO extends GenericDAO { * It will construct a list of all Relationship objects that have the given RelationshipType object * as the relationshipType property * @param context The relevant DSpace context + * @param item item to filter by * @param relationshipType The RelationshipType object to be checked on * @param limit paging limit * @param offset paging offset - * @param item item to filter by + * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version + * that is relevant for this relationship * @return A list of Relationship objects that have the given RelationshipType object as the * relationshipType property * @throws SQLException If something goes wrong */ - List findByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, - Integer limit, Integer offset) throws SQLException; + List findByItemAndRelationshipType( + Context context, Item item, RelationshipType 
relationshipType, Integer limit, Integer offset, + boolean excludeNonLatest + ) throws SQLException; /** * This method returns a list of Relationship objects for the given RelationshipType object. * It will construct a list of all Relationship objects that have the given RelationshipType object * as the relationshipType property * @param context The relevant DSpace context + * @param item item to filter by * @param relationshipType The RelationshipType object to be checked on + * @param isLeft Is item left or right * @param limit paging limit * @param offset paging offset - * @param item item to filter by - * @param isLeft Is item left or right + * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version + * that is relevant for this relationship * @return A list of Relationship objects that have the given RelationshipType object as the * relationshipType property * @throws SQLException If something goes wrong */ - List findByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, - boolean isLeft, Integer limit, Integer offset) - throws SQLException; + List findByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, boolean isLeft, Integer limit, Integer offset, + boolean excludeNonLatest + ) throws SQLException; + + /** + * This method returns the UUIDs of all items that have a relationship with the given item, from the perspective + * of the other item. In other words, given a relationship with the given item, the given item should have + * "latest status" in order for the other item uuid to be returned. + * + * This method differs from the "excludeNonLatest" property in other methods, + * because in this method the current item should have "latest status" to return the other item, + * whereas with "excludeNonLatest" the other item should have "latest status" to be returned. 
+ * + * This method is used to index items in solr; when searching for related items of one of the returned uuids, + * the given item should appear as a search result. + * + * NOTE: This method does not return {@link Relationship}s for performance, because doing so would eagerly fetch + * the items on both sides, which is unnecessary. + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type. + * @param context the DSpace context. + * @param latestItem the target item; only relationships where this item has "latest status" should be considered. + * @param relationshipType the relationship type for which relationships should be selected. + * @param isLeft whether the entity type of the item occurs on the left or right side of the relationship type. + * This is redundant in most cases, but necessary because relationship types my have + * the same entity type on both sides. + * @return a list containing pairs of relationship ids and item uuids. + * @throws SQLException if something goes wrong. 
+ */ + public List findByLatestItemAndRelationshipType( + Context context, Item latestItem, RelationshipType relationshipType, boolean isLeft + ) throws SQLException; /** * This method returns a list of Relationship objects for the given typeName @@ -183,28 +204,34 @@ public interface RelationshipDAO extends GenericDAO { /** * This method returns a count of Relationship objects that have the given Item object * as a leftItem or a rightItem - * @param context The relevant DSpace context - * @param item The item that should be either a leftItem or a rightItem of all - * the Relationship objects in the returned list + * @param context The relevant DSpace context + * @param item The item that should be either a leftItem or a rightItem of all + * the Relationship objects in the returned list + * @param excludeTilted if true, excludes tilted relationships + * @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version + * that is relevant * @return The list of Relationship objects that contain either a left or a * right item that is equal to the given item * @throws SQLException If something goes wrong */ - int countByItem(Context context, Item item) throws SQLException; + int countByItem(Context context, Item item, boolean excludeTilted, boolean excludeNonLatest) throws SQLException; /** * Count total number of relationships (rows in relationship table) by an item and a relationship type and a boolean * indicating whether the item should be the leftItem or the rightItem * - * @param context context - * @param relationshipType relationship type to filter by - * @param item item to filter by - * @param isLeft Indicating whether the counted Relationships should have the given Item on the left side or not + * @param context context + * @param relationshipType relationship type to filter by + * @param item item to filter by + * @param isLeft indicating whether the counted Relationships should have the given Item on the left 
side + * @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version + * that is relevant * @return total count * @throws SQLException if database error */ - int countByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, boolean isLeft) - throws SQLException; + int countByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, boolean isLeft, boolean excludeNonLatest + ) throws SQLException; /** * Count total number of relationships (rows in relationship table) given a typeName diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java index c4125696a8..aad8cf3c50 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java @@ -79,6 +79,20 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO implements ItemDA return iterate(query); } + @Override + public Iterator findAllRegularItems(Context context) throws SQLException { + // NOTE: This query includes archived items, withdrawn items and older versions of items. + // It does not include workspace, workflow or template items. 
+ Query query = createQuery( + context, + "SELECT i FROM Item as i " + + "LEFT JOIN Version as v ON i = v.item " + + "WHERE i.inArchive=true or i.withdrawn=true or (i.inArchive=false and v.id IS NOT NULL) " + + "ORDER BY i.id" + ); + return iterate(query); + } + @Override public Iterator findAll(Context context, boolean archived, boolean withdrawn, boolean discoverable, Date lastModified) diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java index 5c8083a86b..23ce6ce381 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java @@ -7,7 +7,10 @@ */ package org.dspace.content.dao.impl; +import static org.dspace.scripts.Process_.CREATION_TIME; + import java.sql.SQLException; +import java.util.Date; import java.util.LinkedList; import java.util.List; import java.util.Map; @@ -17,6 +20,7 @@ import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; import org.apache.commons.lang3.StringUtils; +import org.dspace.content.ProcessStatus; import org.dspace.content.dao.ProcessDAO; import org.dspace.core.AbstractHibernateDAO; import org.dspace.core.Context; @@ -147,6 +151,23 @@ public class ProcessDAOImpl extends AbstractHibernateDAO implements Pro } + @Override + public List findByStatusAndCreationTimeOlderThan(Context context, List statuses, + Date date) throws SQLException { + + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class); + + Root processRoot = criteriaQuery.from(Process.class); + criteriaQuery.select(processRoot); + + Predicate creationTimeLessThanGivenDate = criteriaBuilder.lessThan(processRoot.get(CREATION_TIME), date); + Predicate statusIn = processRoot.get(Process_.PROCESS_STATUS).in(statuses); + 
criteriaQuery.where(criteriaBuilder.and(creationTimeLessThanGivenDate, statusIn)); + + return list(context, criteriaQuery, false, Process.class, -1, -1); + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/RelationshipDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/RelationshipDAOImpl.java index 48baf45f23..e2f84bc1cb 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/RelationshipDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/RelationshipDAOImpl.java @@ -11,17 +11,22 @@ import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import java.util.UUID; +import java.util.stream.Collectors; import javax.persistence.Query; +import javax.persistence.Tuple; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; +import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; import org.dspace.content.Item; +import org.dspace.content.Item_; import org.dspace.content.Relationship; import org.dspace.content.RelationshipType; import org.dspace.content.RelationshipType_; import org.dspace.content.Relationship_; import org.dspace.content.dao.RelationshipDAO; +import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.RelationshipTypeService; import org.dspace.core.AbstractHibernateDAO; @@ -30,95 +35,152 @@ import org.dspace.core.Context; public class RelationshipDAOImpl extends AbstractHibernateDAO implements RelationshipDAO { @Override - public List findByItem(Context context, Item item, boolean excludeTilted) throws SQLException { - return findByItem(context, item, -1, -1, excludeTilted); + public List findByItem( + Context context, Item item, boolean excludeTilted, boolean excludeNonLatest + ) throws SQLException { + return findByItem(context, item, -1, -1, excludeTilted, excludeNonLatest); } @Override - public List 
findByItem(Context context, Item item, Integer limit, Integer offset, - boolean excludeTilted) throws SQLException { - + public List findByItem( + Context context, Item item, Integer limit, Integer offset, boolean excludeTilted, boolean excludeNonLatest + ) throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); - CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); Root relationshipRoot = criteriaQuery.from(Relationship.class); criteriaQuery.select(relationshipRoot); - if (excludeTilted) { - // If this item is the left item, - // return relationships for types which are not tilted right (tilted is either left nor null) - // If this item is the right item, - // return relationships for types which are not tilted left (tilted is either right nor null) - criteriaQuery - .where(criteriaBuilder.or( - criteriaBuilder.and( - criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item), - criteriaBuilder.or( - criteriaBuilder.isNull(relationshipRoot.get(Relationship_.relationshipType) - .get(RelationshipType_.tilted)), - criteriaBuilder.notEqual(relationshipRoot - .get(Relationship_.relationshipType) - .get(RelationshipType_.tilted), RelationshipType.Tilted.RIGHT))), - criteriaBuilder.and( - criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item), - criteriaBuilder.or( - criteriaBuilder.isNull(relationshipRoot.get(Relationship_.relationshipType) - .get(RelationshipType_.tilted)), - criteriaBuilder.notEqual(relationshipRoot - .get(Relationship_.relationshipType) - .get(RelationshipType_.tilted), RelationshipType.Tilted.LEFT))))); - } else { - criteriaQuery - .where(criteriaBuilder.or(criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item), - criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item))); - } + + criteriaQuery.where( + criteriaBuilder.or( + 
getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, excludeTilted, excludeNonLatest), + getRightItemPredicate(criteriaBuilder, relationshipRoot, item, excludeTilted, excludeNonLatest) + ) + ); + return list(context, criteriaQuery, false, Relationship.class, limit, offset); } - @Override - public int countByItem(Context context, Item item) - throws SQLException { + /** + * Get the predicate for a criteria query that selects relationships by their left item. + * @param criteriaBuilder the criteria builder. + * @param relationshipRoot the relationship root. + * @param item the item that is being searched for. + * @param excludeTilted if true, exclude tilted relationships. + * @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version + * that is relevant. + * @return a predicate that satisfies the given restrictions. + */ + protected Predicate getLeftItemPredicate( + CriteriaBuilder criteriaBuilder, Root relationshipRoot, Item item, + boolean excludeTilted, boolean excludeNonLatest + ) { + List predicates = new ArrayList<>(); + // match relationships based on the left item + predicates.add( + criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item) + ); + + if (excludeTilted) { + // if this item is the left item, + // return relationships for types which are NOT tilted right (tilted is either left nor null) + predicates.add( + criteriaBuilder.or( + criteriaBuilder.isNull( + relationshipRoot.get(Relationship_.relationshipType).get(RelationshipType_.tilted) + ), + criteriaBuilder.notEqual( + relationshipRoot.get(Relationship_.relationshipType).get(RelationshipType_.tilted), + RelationshipType.Tilted.RIGHT + ) + ) + ); + } + + if (excludeNonLatest) { + // if this item is the left item, + // return relationships for which the right item is the "latest" version that is relevant. 
+ predicates.add( + criteriaBuilder.notEqual( + relationshipRoot.get(Relationship_.LATEST_VERSION_STATUS), + Relationship.LatestVersionStatus.LEFT_ONLY + ) + ); + } + + return criteriaBuilder.and(predicates.toArray(new Predicate[]{})); + } + + /** + * Get the predicate for a criteria query that selects relationships by their right item. + * @param criteriaBuilder the criteria builder. + * @param relationshipRoot the relationship root. + * @param item the item that is being searched for. + * @param excludeTilted if true, exclude tilted relationships. + * @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version + * that is relevant. + * @return a predicate that satisfies the given restrictions. + */ + protected Predicate getRightItemPredicate( + CriteriaBuilder criteriaBuilder, Root relationshipRoot, Item item, + boolean excludeTilted, boolean excludeNonLatest + ) { + List predicates = new ArrayList<>(); + + // match relationships based on the right item + predicates.add( + criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item) + ); + + if (excludeTilted) { + // if this item is the right item, + // return relationships for types which are NOT tilted left (tilted is either right nor null) + predicates.add( + criteriaBuilder.or( + criteriaBuilder.isNull( + relationshipRoot.get(Relationship_.relationshipType).get(RelationshipType_.tilted) + ), + criteriaBuilder.notEqual( + relationshipRoot.get(Relationship_.relationshipType).get(RelationshipType_.tilted), + RelationshipType.Tilted.LEFT + ) + ) + ); + } + + if (excludeNonLatest) { + // if this item is the right item, + // return relationships for which the left item is the "latest" version that is relevant. 
+ predicates.add( + criteriaBuilder.notEqual( + relationshipRoot.get(Relationship_.LATEST_VERSION_STATUS), + Relationship.LatestVersionStatus.RIGHT_ONLY + ) + ); + } + + return criteriaBuilder.and(predicates.toArray(new Predicate[]{})); + } + + @Override + public int countByItem( + Context context, Item item, boolean excludeTilted, boolean excludeNonLatest + ) throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); Root relationshipRoot = criteriaQuery.from(Relationship.class); criteriaQuery.select(relationshipRoot); - criteriaQuery - .where(criteriaBuilder.or(criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item), - criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item))); + + criteriaQuery.where( + criteriaBuilder.or( + getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, excludeTilted, excludeNonLatest), + getRightItemPredicate(criteriaBuilder, relationshipRoot, item, excludeTilted, excludeNonLatest) + ) + ); + return count(context, criteriaQuery, criteriaBuilder, relationshipRoot); } - @Override - public int findNextLeftPlaceByLeftItem(Context context, Item item) throws SQLException { - CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); - CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); - Root relationshipRoot = criteriaQuery.from(Relationship.class); - criteriaQuery.select(relationshipRoot); - criteriaQuery.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item)); - List list = list(context, criteriaQuery, false, Relationship.class, -1, -1); - list.sort((o1, o2) -> o2.getLeftPlace() - o1.getLeftPlace()); - if (!list.isEmpty()) { - return list.get(0).getLeftPlace() + 1; - } else { - return 0; - } - } - - @Override - public int findNextRightPlaceByRightItem(Context context, Item item) throws SQLException { - CriteriaBuilder 
criteriaBuilder = getCriteriaBuilder(context); - CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); - Root relationshipRoot = criteriaQuery.from(Relationship.class); - criteriaQuery.select(relationshipRoot); - criteriaQuery.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item)); - List list = list(context, criteriaQuery, false, Relationship.class, -1, -1); - list.sort((o1, o2) -> o2.getRightPlace() - o1.getRightPlace()); - if (!list.isEmpty()) { - return list.get(0).getRightPlace() + 1; - } else { - return 0; - } - } - @Override public List findByRelationshipType(Context context, RelationshipType relationshipType) throws SQLException { @@ -140,49 +202,132 @@ public class RelationshipDAOImpl extends AbstractHibernateDAO impl } @Override - public List findByItemAndRelationshipType(Context context, Item item, - RelationshipType relationshipType, Integer limit, - Integer offset) - throws SQLException { - + public List findByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, Integer limit, Integer offset, + boolean excludeNonLatest + ) throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); Root relationshipRoot = criteriaQuery.from(Relationship.class); criteriaQuery.select(relationshipRoot); - criteriaQuery - .where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), - relationshipType), criteriaBuilder.or - (criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item), - criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item))); + + criteriaQuery.where( + criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType), + criteriaBuilder.or( + getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest), + getRightItemPredicate(criteriaBuilder, 
relationshipRoot, item, false, excludeNonLatest) + ) + ); + return list(context, criteriaQuery, true, Relationship.class, limit, offset); } @Override - public List findByItemAndRelationshipType(Context context, Item item, - RelationshipType relationshipType, boolean isLeft, - Integer limit, Integer offset) - throws SQLException { - + public List findByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, boolean isLeft, Integer limit, Integer offset, + boolean excludeNonLatest + ) throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); Root relationshipRoot = criteriaQuery.from(Relationship.class); criteriaQuery.select(relationshipRoot); + if (isLeft) { - criteriaQuery - .where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), - relationshipType), - criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item)); + criteriaQuery.where( + criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType), + getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest) + ); criteriaQuery.orderBy(criteriaBuilder.asc(relationshipRoot.get(Relationship_.leftPlace))); } else { - criteriaQuery - .where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), - relationshipType), - criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item)); + criteriaQuery.where( + criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType), + getRightItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest) + ); criteriaQuery.orderBy(criteriaBuilder.asc(relationshipRoot.get(Relationship_.rightPlace))); } + return list(context, criteriaQuery, true, Relationship.class, limit, offset); } + @Override + public List findByLatestItemAndRelationshipType( + Context context, 
Item latestItem, RelationshipType relationshipType, boolean isLeft + ) throws SQLException { + final String relationshipIdAlias = "relationshipId"; + final String itemUuidAlias = "itemUuid"; + + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = criteriaBuilder.createTupleQuery(); + Root relationshipRoot = criteriaQuery.from(Relationship.class); + + ArrayList predicates = new ArrayList<>(); + + // all relationships should have the specified relationship type + predicates.add( + criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType) + ); + + if (isLeft) { + // match relationships based on the left item + predicates.add( + criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), latestItem) + ); + + // the left item has to have "latest status" => accept BOTH and LEFT_ONLY + predicates.add( + criteriaBuilder.notEqual( + relationshipRoot.get(Relationship_.LATEST_VERSION_STATUS), + Relationship.LatestVersionStatus.RIGHT_ONLY + ) + ); + + // return the UUIDs of the right item + criteriaQuery.multiselect( + relationshipRoot.get(Relationship_.id).alias(relationshipIdAlias), + relationshipRoot.get(Relationship_.rightItem).get(Item_.id).alias(itemUuidAlias) + ); + } else { + // match relationships based on the right item + predicates.add( + criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), latestItem) + ); + + // the right item has to have "latest status" => accept BOTH and RIGHT_ONLY + predicates.add( + criteriaBuilder.notEqual( + relationshipRoot.get(Relationship_.LATEST_VERSION_STATUS), + Relationship.LatestVersionStatus.LEFT_ONLY + ) + ); + + // return the UUIDs of the left item + criteriaQuery.multiselect( + relationshipRoot.get(Relationship_.id).alias(relationshipIdAlias), + relationshipRoot.get(Relationship_.leftItem).get(Item_.id).alias(itemUuidAlias) + ); + } + + // all predicates are chained with the AND operator + 
criteriaQuery.where(predicates.toArray(new Predicate[]{})); + + // deduplicate result + criteriaQuery.distinct(true); + + // execute query + Query query = this.getHibernateSession(context).createQuery(criteriaQuery); + query.setHint("org.hibernate.cacheable", true); + List resultList = query.getResultList(); + + // convert types + return resultList.stream() + .map(Tuple.class::cast) + .map(t -> new ItemUuidAndRelationshipId( + (UUID) t.get(itemUuidAlias), + (Integer) t.get(relationshipIdAlias) + )) + .collect(Collectors.toList()); + } + @Override public List findByTypeName(Context context, String typeName) throws SQLException { @@ -228,24 +373,26 @@ public class RelationshipDAOImpl extends AbstractHibernateDAO impl } @Override - public int countByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, - boolean isLeft) throws SQLException { - + public int countByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, boolean isLeft, boolean excludeNonLatest + ) throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); Root relationshipRoot = criteriaQuery.from(Relationship.class); criteriaQuery.select(relationshipRoot); + if (isLeft) { - criteriaQuery - .where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), - relationshipType), - criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item)); + criteriaQuery.where( + criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType), + getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest) + ); } else { - criteriaQuery - .where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), - relationshipType), - criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item)); + criteriaQuery.where( + 
criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType), + getRightItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest) + ); } + return count(context, criteriaQuery, criteriaBuilder, relationshipRoot); } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/pojo/ItemUuidAndRelationshipId.java b/dspace-api/src/main/java/org/dspace/content/dao/pojo/ItemUuidAndRelationshipId.java new file mode 100644 index 0000000000..6668b0d211 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/dao/pojo/ItemUuidAndRelationshipId.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.dao.pojo; + +import java.util.UUID; + +import org.dspace.content.Relationship; +import org.dspace.content.dao.RelationshipDAO; +import org.springframework.lang.NonNull; + +/** + * Used by {@link RelationshipDAO#findByLatestItemAndRelationshipType} to avoid creating {@link Relationship}s. 
+ */ +public class ItemUuidAndRelationshipId { + + private final UUID itemUuid; + private final int relationshipId; + + public ItemUuidAndRelationshipId(@NonNull UUID itemUuid, @NonNull int relationshipId) { + this.itemUuid = itemUuid; + this.relationshipId = relationshipId; + } + + public UUID getItemUuid() { + return this.itemUuid; + } + + public int getRelationshipId() { + return this.relationshipId; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/content/logic/DefaultFilter.java b/dspace-api/src/main/java/org/dspace/content/logic/DefaultFilter.java index c0649e9ea2..490c3949ea 100644 --- a/dspace-api/src/main/java/org/dspace/content/logic/DefaultFilter.java +++ b/dspace-api/src/main/java/org/dspace/content/logic/DefaultFilter.java @@ -7,7 +7,8 @@ */ package org.dspace.content.logic; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.content.Item; import org.dspace.core.Context; @@ -21,7 +22,7 @@ import org.dspace.core.Context; */ public class DefaultFilter implements Filter { private LogicalStatement statement; - private static Logger log = Logger.getLogger(Filter.class); + private final static Logger log = LogManager.getLogger(); /** * Set statement from Spring configuration in item-filters.xml diff --git a/dspace-api/src/main/java/org/dspace/content/logic/condition/AbstractCondition.java b/dspace-api/src/main/java/org/dspace/content/logic/condition/AbstractCondition.java index 7a87e13066..0202243265 100644 --- a/dspace-api/src/main/java/org/dspace/content/logic/condition/AbstractCondition.java +++ b/dspace-api/src/main/java/org/dspace/content/logic/condition/AbstractCondition.java @@ -12,7 +12,6 @@ import java.util.Map; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.content.Item; -import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.logic.LogicalStatementException; import 
org.dspace.content.service.CollectionService; import org.dspace.content.service.ItemService; @@ -32,10 +31,10 @@ public abstract class AbstractCondition implements Condition { private Map parameters; // Declare and instantiate spring services - //@Autowired(required = true) - protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); - //@Autowired(required = true) - protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); + @Autowired(required = true) + protected ItemService itemService; + @Autowired(required = true) + protected CollectionService collectionService; @Autowired(required = true) protected HandleService handleService; diff --git a/dspace-api/src/main/java/org/dspace/content/logic/condition/InCommunityCondition.java b/dspace-api/src/main/java/org/dspace/content/logic/condition/InCommunityCondition.java index b9c1d15d2a..9f588f9c3b 100644 --- a/dspace-api/src/main/java/org/dspace/content/logic/condition/InCommunityCondition.java +++ b/dspace-api/src/main/java/org/dspace/content/logic/condition/InCommunityCondition.java @@ -10,7 +10,8 @@ package org.dspace.content.logic.condition; import java.sql.SQLException; import java.util.List; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.DSpaceObject; @@ -26,7 +27,7 @@ import org.dspace.core.Context; * @version $Revision$ */ public class InCommunityCondition extends AbstractCondition { - private static Logger log = Logger.getLogger(InCommunityCondition.class); + private final static Logger log = LogManager.getLogger(); /** * Return true if item is in one of the specified collections diff --git a/dspace-api/src/main/java/org/dspace/content/logic/condition/IsWithdrawnCondition.java 
b/dspace-api/src/main/java/org/dspace/content/logic/condition/IsWithdrawnCondition.java index 6475ef09e2..6424e6f35f 100644 --- a/dspace-api/src/main/java/org/dspace/content/logic/condition/IsWithdrawnCondition.java +++ b/dspace-api/src/main/java/org/dspace/content/logic/condition/IsWithdrawnCondition.java @@ -7,7 +7,8 @@ */ package org.dspace.content.logic.condition; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.content.Item; import org.dspace.content.logic.LogicalStatementException; import org.dspace.core.Context; @@ -19,7 +20,7 @@ import org.dspace.core.Context; * @version $Revision$ */ public class IsWithdrawnCondition extends AbstractCondition { - private static Logger log = Logger.getLogger(IsWithdrawnCondition.class); + private final static Logger log = LogManager.getLogger(); /** * Return true if item is withdrawn diff --git a/dspace-api/src/main/java/org/dspace/content/logic/condition/MetadataValueMatchCondition.java b/dspace-api/src/main/java/org/dspace/content/logic/condition/MetadataValueMatchCondition.java index d9c774485a..4e30c75a2a 100644 --- a/dspace-api/src/main/java/org/dspace/content/logic/condition/MetadataValueMatchCondition.java +++ b/dspace-api/src/main/java/org/dspace/content/logic/condition/MetadataValueMatchCondition.java @@ -11,7 +11,8 @@ import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.content.Item; import org.dspace.content.MetadataValue; import org.dspace.content.logic.LogicalStatementException; @@ -26,7 +27,7 @@ import org.dspace.core.Context; */ public class MetadataValueMatchCondition extends AbstractCondition { - private static Logger log = Logger.getLogger(MetadataValueMatchCondition.class); + private final static Logger log = LogManager.getLogger(); /** * Return true 
if any value for a specified field in the item matches a specified regex pattern diff --git a/dspace-api/src/main/java/org/dspace/content/logic/condition/MetadataValuesMatchCondition.java b/dspace-api/src/main/java/org/dspace/content/logic/condition/MetadataValuesMatchCondition.java index df9cbfbf1d..74ccfa4ca8 100644 --- a/dspace-api/src/main/java/org/dspace/content/logic/condition/MetadataValuesMatchCondition.java +++ b/dspace-api/src/main/java/org/dspace/content/logic/condition/MetadataValuesMatchCondition.java @@ -11,7 +11,8 @@ import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.content.Item; import org.dspace.content.MetadataValue; import org.dspace.content.logic.LogicalStatementException; @@ -26,7 +27,7 @@ import org.dspace.core.Context; */ public class MetadataValuesMatchCondition extends AbstractCondition { - private static Logger log = Logger.getLogger(MetadataValuesMatchCondition.class); + private final static Logger log = LogManager.getLogger(); /** * Return true if any value for a specified field in the item matches any of the specified regex patterns diff --git a/dspace-api/src/main/java/org/dspace/content/logic/condition/ReadableByGroupCondition.java b/dspace-api/src/main/java/org/dspace/content/logic/condition/ReadableByGroupCondition.java index e76772803c..65f9925222 100644 --- a/dspace-api/src/main/java/org/dspace/content/logic/condition/ReadableByGroupCondition.java +++ b/dspace-api/src/main/java/org/dspace/content/logic/condition/ReadableByGroupCondition.java @@ -10,7 +10,8 @@ package org.dspace.content.logic.condition; import java.sql.SQLException; import java.util.List; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.authorize.ResourcePolicy; import 
org.dspace.authorize.factory.AuthorizeServiceFactory; import org.dspace.authorize.service.AuthorizeService; @@ -27,7 +28,7 @@ import org.dspace.core.Context; * @version $Revision$ */ public class ReadableByGroupCondition extends AbstractCondition { - private static Logger log = Logger.getLogger(ReadableByGroupCondition.class); + private final static Logger log = LogManager.getLogger(); // Authorize service AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); diff --git a/dspace-api/src/main/java/org/dspace/content/packager/AbstractMETSDisseminator.java b/dspace-api/src/main/java/org/dspace/content/packager/AbstractMETSDisseminator.java index 471b9ba27c..685fd9000d 100644 --- a/dspace-api/src/main/java/org/dspace/content/packager/AbstractMETSDisseminator.java +++ b/dspace-api/src/main/java/org/dspace/content/packager/AbstractMETSDisseminator.java @@ -14,9 +14,7 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.io.UnsupportedEncodingException; import java.lang.reflect.InvocationTargetException; -import java.net.URLEncoder; import java.sql.SQLException; import java.util.HashMap; import java.util.Iterator; @@ -83,10 +81,10 @@ import org.dspace.license.factory.LicenseServiceFactory; import org.dspace.license.service.CreativeCommonsService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Element; -import org.jdom.Namespace; -import org.jdom.output.Format; -import org.jdom.output.XMLOutputter; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.output.Format; +import org.jdom2.output.XMLOutputter; /** * Base class for disseminator of @@ -328,45 +326,43 @@ public abstract class AbstractMETSDisseminator Mets manifest = makeManifest(context, dso, params, extraStreams); // copy extra (metadata, license, etc) bitstreams into zip, update manifest - if 
(extraStreams != null) { - for (Map.Entry ment : extraStreams.getMap().entrySet()) { - MdRef ref = ment.getKey(); + for (Map.Entry ment : extraStreams.getMap().entrySet()) { + MdRef ref = ment.getKey(); - // Both Deposit Licenses & CC Licenses which are referenced as "extra streams" may already be - // included in our Package (if their bundles are already included in the section of manifest). - // So, do a special check to see if we need to link up extra License entries to the bitstream - // in the . - // (this ensures that we don't accidentally add the same License file to our package twice) - linkLicenseRefsToBitstreams(context, params, dso, ref); + // Both Deposit Licenses & CC Licenses which are referenced as "extra streams" may already be + // included in our Package (if their bundles are already included in the section of manifest). + // So, do a special check to see if we need to link up extra License entries to the bitstream + // in the . + // (this ensures that we don't accidentally add the same License file to our package twice) + linkLicenseRefsToBitstreams(context, params, dso, ref); - //If this 'mdRef' is NOT already linked up to a file in the package, - // then its file must be missing. So, we are going to add a new - // file to the Zip package. - if (ref.getXlinkHref() == null || ref.getXlinkHref().isEmpty()) { - InputStream is = ment.getValue(); + //If this 'mdRef' is NOT already linked up to a file in the package, + // then its file must be missing. So, we are going to add a new + // file to the Zip package. 
+ if (ref.getXlinkHref() == null || ref.getXlinkHref().isEmpty()) { + InputStream is = ment.getValue(); - // create a hopefully unique filename within the Zip - String fname = gensym("metadata"); - // link up this 'mdRef' to point to that file - ref.setXlinkHref(fname); - if (log.isDebugEnabled()) { - log.debug("Writing EXTRA stream to Zip: " + fname); - } - //actually add the file to the Zip package - ZipEntry ze = new ZipEntry(fname); - if (lmTime != 0) { - ze.setTime(lmTime); - } else { - // Set a default modified date so that checksum of Zip doesn't change if Zip contents are - // unchanged - ze.setTime(DEFAULT_MODIFIED_DATE); - } - zip.putNextEntry(ze); - Utils.copy(is, zip); - zip.closeEntry(); - - is.close(); + // create a hopefully unique filename within the Zip + String fname = gensym("metadata"); + // link up this 'mdRef' to point to that file + ref.setXlinkHref(fname); + if (log.isDebugEnabled()) { + log.debug("Writing EXTRA stream to Zip: " + fname); } + //actually add the file to the Zip package + ZipEntry ze = new ZipEntry(fname); + if (lmTime != 0) { + ze.setTime(lmTime); + } else { + // Set a default modified date so that checksum of Zip doesn't change if Zip contents are + // unchanged + ze.setTime(DEFAULT_MODIFIED_DATE); + } + zip.putNextEntry(ze); + Utils.copy(is, zip); + zip.closeEntry(); + + is.close(); } } @@ -467,17 +463,17 @@ public abstract class AbstractMETSDisseminator Utils.copy(input, zip); input.close(); } else { - log.warn("Adding zero-length file for Bitstream, SID=" - + String.valueOf(bitstream.getSequenceID()) + log.warn("Adding zero-length file for Bitstream, uuid=" + + String.valueOf(bitstream.getID()) + ", not authorized for READ."); } zip.closeEntry(); } else if (unauth != null && unauth.equalsIgnoreCase("skip")) { - log.warn("Skipping Bitstream, SID=" + String - .valueOf(bitstream.getSequenceID()) + ", not authorized for READ."); + log.warn("Skipping Bitstream, uuid=" + String + .valueOf(bitstream.getID()) + ", not authorized 
for READ."); } else { throw new AuthorizeException( - "Not authorized to read Bitstream, SID=" + String.valueOf(bitstream.getSequenceID())); + "Not authorized to read Bitstream, uuid=" + String.valueOf(bitstream.getID())); } } } @@ -898,12 +894,12 @@ public abstract class AbstractMETSDisseminator continue; } else if (!(unauth != null && unauth.equalsIgnoreCase("zero"))) { throw new AuthorizeException( - "Not authorized to read Bitstream, SID=" + String.valueOf(bitstream.getSequenceID())); + "Not authorized to read Bitstream, uuid=" + String.valueOf(bitstream.getID())); } } - String sid = String.valueOf(bitstream.getSequenceID()); - String fileID = bitstreamIDstart + sid; + String uuid = String.valueOf(bitstream.getID()); + String fileID = bitstreamIDstart + uuid; edu.harvard.hul.ois.mets.File file = new edu.harvard.hul.ois.mets.File(); file.setID(fileID); file.setSEQ(bitstream.getSequenceID()); @@ -926,7 +922,7 @@ public abstract class AbstractMETSDisseminator * extracted text or a thumbnail, so we use the name to work * out which bitstream to be in the same group as */ - String groupID = "GROUP_" + bitstreamIDstart + sid; + String groupID = "GROUP_" + bitstreamIDstart + uuid; if ((bundle.getName() != null) && (bundle.getName().equals("THUMBNAIL") || bundle.getName().startsWith("TEXT"))) { @@ -936,7 +932,7 @@ public abstract class AbstractMETSDisseminator bitstream); if (original != null) { groupID = "GROUP_" + bitstreamIDstart - + original.getSequenceID(); + + String.valueOf(original.getID()); } } file.setGROUPID(groupID); @@ -1405,7 +1401,7 @@ public abstract class AbstractMETSDisseminator // if bare manifest, use external "persistent" URI for bitstreams if (params != null && (params.getBooleanProperty("manifestOnly", false))) { // Try to build a persistent(-ish) URI for bitstream - // Format: {site-base-url}/bitstream/{item-handle}/{sequence-id}/{bitstream-name} + // Format: {site-ui-url}/bitstreams/{bitstream-uuid} try { // get handle of parent Item of this 
bitstream, if there is one: String handle = null; @@ -1416,26 +1412,13 @@ public abstract class AbstractMETSDisseminator handle = bi.get(0).getHandle(); } } - if (handle != null) { - return configurationService - .getProperty("dspace.ui.url") - + "/bitstream/" - + handle - + "/" - + String.valueOf(bitstream.getSequenceID()) - + "/" - + URLEncoder.encode(bitstream.getName(), "UTF-8"); - } else { //no Handle assigned, so persistent(-ish) URI for bitstream is - // Format: {site-base-url}/retrieve/{bitstream-internal-id} - return configurationService - .getProperty("dspace.ui.url") - + "/retrieve/" - + String.valueOf(bitstream.getID()); - } + return configurationService + .getProperty("dspace.ui.url") + + "/bitstreams/" + + String.valueOf(bitstream.getID()) + + "/download"; } catch (SQLException e) { log.error("Database problem", e); - } catch (UnsupportedEncodingException e) { - log.error("Unknown character set", e); } // We should only get here if we failed to build a nice URL above diff --git a/dspace-api/src/main/java/org/dspace/content/packager/AbstractMETSIngester.java b/dspace-api/src/main/java/org/dspace/content/packager/AbstractMETSIngester.java index 9a7fffdec5..98277c4f9c 100644 --- a/dspace-api/src/main/java/org/dspace/content/packager/AbstractMETSIngester.java +++ b/dspace-api/src/main/java/org/dspace/content/packager/AbstractMETSIngester.java @@ -51,7 +51,7 @@ import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.workflow.WorkflowException; import org.dspace.workflow.factory.WorkflowServiceFactory; -import org.jdom.Element; +import org.jdom2.Element; /** * Base class for package ingester of METS (Metadata Encoding and Transmission diff --git a/dspace-api/src/main/java/org/dspace/content/packager/DSpaceAIPIngester.java b/dspace-api/src/main/java/org/dspace/content/packager/DSpaceAIPIngester.java index 954a68bfc1..e7be7ab511 100644 --- 
a/dspace-api/src/main/java/org/dspace/content/packager/DSpaceAIPIngester.java +++ b/dspace-api/src/main/java/org/dspace/content/packager/DSpaceAIPIngester.java @@ -20,7 +20,7 @@ import org.dspace.content.crosswalk.CrosswalkException; import org.dspace.content.crosswalk.MetadataValidationException; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.jdom.Element; +import org.jdom2.Element; /** * Subclass of the METS packager framework to ingest a DSpace diff --git a/dspace-api/src/main/java/org/dspace/content/packager/DSpaceMETSIngester.java b/dspace-api/src/main/java/org/dspace/content/packager/DSpaceMETSIngester.java index da3965534f..380764268c 100644 --- a/dspace-api/src/main/java/org/dspace/content/packager/DSpaceMETSIngester.java +++ b/dspace-api/src/main/java/org/dspace/content/packager/DSpaceMETSIngester.java @@ -23,7 +23,7 @@ import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.factory.CoreServiceFactory; import org.dspace.core.service.PluginService; -import org.jdom.Element; +import org.jdom2.Element; /** * Packager plugin to ingest a diff --git a/dspace-api/src/main/java/org/dspace/content/packager/METSManifest.java b/dspace-api/src/main/java/org/dspace/content/packager/METSManifest.java index 8fb8172aeb..3399bdf0f0 100644 --- a/dspace-api/src/main/java/org/dspace/content/packager/METSManifest.java +++ b/dspace-api/src/main/java/org/dspace/content/packager/METSManifest.java @@ -35,15 +35,17 @@ import org.dspace.core.Context; import org.dspace.core.factory.CoreServiceFactory; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Content; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.Namespace; -import org.jdom.input.SAXBuilder; -import org.jdom.output.Format; -import org.jdom.output.XMLOutputter; -import org.jdom.xpath.XPath; +import org.jdom2.Content; +import 
org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.input.SAXBuilder; +import org.jdom2.output.Format; +import org.jdom2.output.XMLOutputter; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; /** *

    @@ -382,15 +384,12 @@ public class METSManifest { public List getMdFiles() throws MetadataValidationException { if (mdFiles == null) { - try { - // Use a special namespace with known prefix - // so we get the right prefix. - XPath xpath = XPath.newInstance("descendant::mets:mdRef"); - xpath.addNamespace(metsNS); - mdFiles = xpath.selectNodes(mets); - } catch (JDOMException je) { - throw new MetadataValidationException("Failed while searching for mdRef elements in manifest: ", je); - } + // Use a special namespace with known prefix + // so we get the right prefix. + XPathExpression xpath = + XPathFactory.instance() + .compile("descendant::mets:mdRef", Filters.element(), null, metsNS); + mdFiles = xpath.evaluate(mets); } return mdFiles; } @@ -414,25 +413,22 @@ public class METSManifest { return null; } - try { - XPath xpath = XPath.newInstance( - "mets:fileSec/mets:fileGrp[@USE=\"CONTENT\"]/mets:file[@GROUPID=\"" + groupID + "\"]"); - xpath.addNamespace(metsNS); - List oFiles = xpath.selectNodes(mets); - if (oFiles.size() > 0) { - if (log.isDebugEnabled()) { - log.debug("Got ORIGINAL file for derived=" + file.toString()); - } - Element flocat = ((Element) oFiles.get(0)).getChild("FLocat", metsNS); - if (flocat != null) { - return flocat.getAttributeValue("href", xlinkNS); - } + XPathExpression xpath = + XPathFactory.instance() + .compile( + "mets:fileSec/mets:fileGrp[@USE=\"CONTENT\"]/mets:file[@GROUPID=\"" + groupID + "\"]", + Filters.element(), null, metsNS); + List oFiles = xpath.evaluate(mets); + if (oFiles.size() > 0) { + if (log.isDebugEnabled()) { + log.debug("Got ORIGINAL file for derived=" + file.toString()); + } + Element flocat = oFiles.get(0).getChild("FLocat", metsNS); + if (flocat != null) { + return flocat.getAttributeValue("href", xlinkNS); } - return null; - } catch (JDOMException je) { - log.warn("Got exception on XPATH looking for Original file, " + je.toString()); - return null; } + return null; } // translate bundle name from METS to DSpace; 
METS may be "CONTENT" @@ -888,20 +884,16 @@ public class METSManifest { // use only when path varies each time you call it. protected Element getElementByXPath(String path, boolean nullOk) throws MetadataValidationException { - try { - XPath xpath = XPath.newInstance(path); - xpath.addNamespace(metsNS); - xpath.addNamespace(xlinkNS); - Object result = xpath.selectSingleNode(mets); - if (result == null && nullOk) { - return null; - } else if (result instanceof Element) { - return (Element) result; - } else { - throw new MetadataValidationException("METSManifest: Failed to resolve XPath, path=\"" + path + "\""); - } - } catch (JDOMException je) { - throw new MetadataValidationException("METSManifest: Failed to resolve XPath, path=\"" + path + "\"", je); + XPathExpression xpath = + XPathFactory.instance() + .compile(path, Filters.element(), null, metsNS, xlinkNS); + Element result = xpath.evaluateFirst(mets); + if (result == null && nullOk) { + return null; + } else if (result == null && !nullOk) { + throw new MetadataValidationException("METSManifest: Failed to resolve XPath, path=\"" + path + "\""); + } else { + return result; } } diff --git a/dspace-api/src/main/java/org/dspace/content/packager/RoleDisseminator.java b/dspace-api/src/main/java/org/dspace/content/packager/RoleDisseminator.java index 8643f60f6c..f627779af8 100644 --- a/dspace-api/src/main/java/org/dspace/content/packager/RoleDisseminator.java +++ b/dspace-api/src/main/java/org/dspace/content/packager/RoleDisseminator.java @@ -35,7 +35,7 @@ import org.dspace.eperson.PasswordHash; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.GroupService; -import org.jdom.Namespace; +import org.jdom2.Namespace; /** * Plugin to export all Group and EPerson objects in XML, perhaps for reloading. 
diff --git a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java index 522bdac224..a5b2b7d8d8 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java @@ -33,6 +33,11 @@ import org.dspace.eperson.Group; public interface CollectionService extends DSpaceObjectService, DSpaceObjectLegacySupportService { + /* + * Field used to sort community and collection lists at solr + */ + public static final String SOLR_SORT_FIELD = "dc.title_sort"; + /** * Create a new collection with a new ID. * Once created the collection is added to the given community @@ -46,7 +51,6 @@ public interface CollectionService public Collection create(Context context, Community community) throws SQLException, AuthorizeException; - /** * Create a new collection with the supplied handle and with a new ID. * Once created the collection is added to the given community diff --git a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java index d5e2f67767..8b7badf223 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java @@ -21,6 +21,7 @@ import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import org.dspace.content.Collection; import org.dspace.content.Community; +import org.dspace.content.EntityType; import org.dspace.content.Item; import org.dspace.content.MetadataField; import org.dspace.content.MetadataValue; @@ -111,8 +112,22 @@ public interface ItemService * @return an iterator over the items in the archive. 
* @throws SQLException if database error */ + @Deprecated public Iterator findAllUnfiltered(Context context) throws SQLException; + /** + * Find all items that are: + * - NOT in the workspace + * - NOT in the workflow + * - NOT a template item for e.g. a collection + * + * This implies that the result also contains older versions of items and withdrawn items. + * @param context the DSpace context. + * @return iterator over all regular items. + * @throws SQLException if database error. + */ + public Iterator findAllRegularItems(Context context) throws SQLException; + /** * Find all the items in the archive by a given submitter. The order is * indeterminate. Only items with the "in archive" flag set are included. @@ -579,6 +594,37 @@ public interface ItemService */ public boolean canCreateNewVersion(Context context, Item item) throws SQLException; + /** + * Returns an iterator of in archive items possessing the passed metadata field, or only + * those matching the passed value, if value is not Item.ANY + * + * @param context DSpace context object + * @param schema metadata field schema + * @param element metadata field element + * @param qualifier metadata field qualifier + * @param value field value or Item.ANY to match any value + * @return an iterator over the items matching that authority value + * @throws SQLException if database error + * @throws AuthorizeException if authorization error + */ + public Iterator findArchivedByMetadataField(Context context, String schema, + String element, String qualifier, + String value) throws SQLException, AuthorizeException; + + /** + * Returns an iterator of in archive items possessing the passed metadata field, or only + * those matching the passed value, if value is not Item.ANY + * + * @param context DSpace context object + * @param metadataField metadata + * @param value field value or Item.ANY to match any value + * @return an iterator over the items matching that authority value + * @throws SQLException if database 
error + * @throws AuthorizeException if authorization error + */ + public Iterator findArchivedByMetadataField(Context context, String metadataField, String value) + throws SQLException, AuthorizeException; + /** * Returns an iterator of Items possessing the passed metadata field, or only * those matching the passed value, if value is not Item.ANY @@ -618,7 +664,7 @@ public interface ItemService */ public Iterator findByAuthorityValue(Context context, String schema, String element, String qualifier, String value) - throws SQLException, AuthorizeException, IOException; + throws SQLException, AuthorizeException; public Iterator findByMetadataFieldAuthority(Context context, String mdString, String authority) @@ -783,4 +829,19 @@ public interface ItemService public List getMetadata(Item item, String schema, String element, String qualifier, String lang, boolean enableVirtualMetadata); + /** + * Retrieve the label of the entity type of the given item. + * @param item the item. + * @return the label of the entity type, taken from the item metadata, or null if not found. + */ + public String getEntityTypeLabel(Item item); + + /** + * Retrieve the entity type of the given item. + * @param context the DSpace context. + * @param item the item. + * @return the entity type of the given item, or null if not found. 
+ */ + public EntityType getEntityType(Context context, Item item) throws SQLException; + } diff --git a/dspace-api/src/main/java/org/dspace/content/service/RelationshipService.java b/dspace-api/src/main/java/org/dspace/content/service/RelationshipService.java index 2e0bb6f2be..719f966e46 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/RelationshipService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/RelationshipService.java @@ -14,7 +14,9 @@ import java.util.UUID; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Item; import org.dspace.content.Relationship; +import org.dspace.content.Relationship.LatestVersionStatus; import org.dspace.content.RelationshipType; +import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId; import org.dspace.core.Context; import org.dspace.service.DSpaceCRUDService; @@ -49,6 +51,25 @@ public interface RelationshipService extends DSpaceCRUDService { List findByItem(Context context, Item item, Integer limit, Integer offset, boolean excludeTilted) throws SQLException; + /** + * Retrieves the list of Relationships currently in the system for which the given Item is either + * a leftItem or a rightItem object + * @param context The relevant DSpace context + * @param item The Item that has to be the left or right item for the relationship to be + * included in the list + * @param limit paging limit + * @param offset paging offset + * @param excludeTilted If true, excludes tilted relationships + * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version + * that is relevant for this relationship + * @return The list of relationships for which each relationship adheres to the above + * listed constraint + * @throws SQLException If something goes wrong + */ + List findByItem( + Context context, Item item, Integer limit, Integer offset, boolean excludeTilted, boolean excludeNonLatest + ) throws SQLException; + /** * Retrieves the 
full list of relationships currently in the system * @param context The relevant DSpace context @@ -79,30 +100,54 @@ public interface RelationshipService extends DSpaceCRUDService { public Relationship create(Context context, Relationship relationship) throws SQLException, AuthorizeException; /** - * This method returns the next leftplace integer to use for a relationship with this item as the leftItem + * Move the given relationship to a new leftPlace and/or rightPlace. * - * @param context The relevant DSpace context - * @param item The item that has to be the leftItem of a relationship for it to qualify - * @return The next integer to be used for the leftplace of a relationship with the given item - * as a left item - * @throws SQLException If something goes wrong + * This will + * 1. verify whether the move is authorized + * 2. move the relationship to the specified left/right place + * 3. update the left/right place of other relationships and/or metadata in order to resolve the move without + * leaving any gaps + * + * At least one of the new places should be non-null, otherwise no changes will be made. 
+ * + * @param context The relevant DSpace context + * @param relationship The Relationship to move + * @param newLeftPlace The value to set the leftPlace of this Relationship to + * @param newRightPlace The value to set the rightPlace of this Relationship to + * @return The moved relationship with updated place variables + * @throws SQLException If something goes wrong + * @throws AuthorizeException If the user is not authorized to update the Relationship or its Items */ - int findNextLeftPlaceByLeftItem(Context context, Item item) throws SQLException; /** - * This method returns the next rightplace integer to use for a relationship with this item as the rightItem + * Move the given relationship to a new leftItem and/or rightItem. * - * @param context The relevant DSpace context - * @param item The item that has to be the rightitem of a relationship for it to qualify - * @return The next integer to be used for the rightplace of a relationship with the given item - * as a right item - * @throws SQLException If something goes wrong + * This will + * 1. move the relationship to the last place in its current left or right Item. This ensures that we don't leave + * any gaps when moving the relationship to a new Item. + * If only one of the relationship's Items is changed, the order of relationships and metadata in the other + * will not be affected + * 2. insert the relationship into the new Item(s) + * + * At least one of the new Items should be non-null, otherwise no changes will be made. 
+ * + * @param context The relevant DSpace context + * @param relationship The Relationship to move + * @param newLeftItem The value to set the leftItem of this Relationship to + * @param newRightItem The value to set the rightItem of this Relationship to + * @return The moved relationship with updated left/right Items variables + * @throws SQLException If something goes wrong + * @throws AuthorizeException If the user is not authorized to update the Relationship or its Items */ - int findNextRightPlaceByRightItem(Context context, Item item) throws SQLException; + Relationship move(Context context, Relationship relationship, Item newLeftItem, Item newRightItem) + throws SQLException, AuthorizeException; /** * This method returns a list of Relationships for which the leftItem or rightItem is equal to the given * Item object and for which the RelationshipType object is equal to the relationshipType property + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type * @param context The relevant DSpace context * @param item The Item object to be matched on the leftItem or rightItem for the relationship * @param relationshipType The RelationshipType object that will be used to check the Relationship on @@ -117,6 +162,7 @@ public interface RelationshipService extends DSpaceCRUDService { /** * This method returns a list of Relationships for which the leftItem or rightItem is equal to the given * Item object and for which the RelationshipType object is equal to the relationshipType property + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type * @param context The relevant DSpace context * @param item The Item object to be matched on the leftItem or rightItem for the relationship * @param relationshipType The RelationshipType object that will be used to check the Relationship on @@ -131,6 +177,24 @@ public interface RelationshipService extends DSpaceCRUDService { /** * This method returns a list of Relationships 
for which the leftItem or rightItem is equal to the given * Item object and for which the RelationshipType object is equal to the relationshipType property + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type + * @param context The relevant DSpace context + * @param item The Item object to be matched on the leftItem or rightItem for the relationship + * @param relationshipType The RelationshipType object that will be used to check the Relationship on + * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version + * that is relevant for this relationship + * @return The list of Relationship objects that have the given Item object as leftItem or rightItem and + * for which the relationshipType property is equal to the given RelationshipType + * @throws SQLException If something goes wrong + */ + public List findByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, int limit, int offset, boolean excludeNonLatest + ) throws SQLException; + + /** + * This method returns a list of Relationships for which the leftItem or rightItem is equal to the given + * Item object and for which the RelationshipType object is equal to the relationshipType property + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type * @param context The relevant DSpace context * @param item The Item object to be matched on the leftItem or rightItem for the relationship * @param relationshipType The RelationshipType object that will be used to check the Relationship on @@ -145,17 +209,51 @@ public interface RelationshipService extends DSpaceCRUDService { throws SQLException; /** - * This method will update the place for the Relationship and all other relationships found by the items and - * relationship type of the given Relationship. 
It will give this Relationship the last place in both the - * left and right place determined by querying for the list of leftRelationships and rightRelationships - * by the leftItem, rightItem and relationshipType of the given Relationship. - * @param context The relevant DSpace context - * @param relationship The Relationship object that will have it's place updated and that will be used - * to retrieve the other relationships whose place might need to be updated - * @throws SQLException If something goes wrong + * This method returns a list of Relationships for which the leftItem or rightItem is equal to the given + * Item object and for which the RelationshipType object is equal to the relationshipType property + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type + * @param context The relevant DSpace context + * @param item The Item object to be matched on the leftItem or rightItem for the relationship + * @param relationshipType The RelationshipType object that will be used to check the Relationship on + * @param isLeft Is the item left or right + * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version + * that is relevant for this relationship + * @return The list of Relationship objects that have the given Item object as leftItem or rightItem and + * for which the relationshipType property is equal to the given RelationshipType + * @throws SQLException If something goes wrong */ - public void updatePlaceInRelationship(Context context, Relationship relationship) - throws SQLException, AuthorizeException; + public List findByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, boolean isLeft, int limit, int offset, + boolean excludeNonLatest + ) throws SQLException; + + /** + * This method returns the UUIDs of all items that have a relationship with the given item, from the perspective + * of the other item. 
In other words, given a relationship with the given item, the given item should have + * "latest status" in order for the other item uuid to be returned. + * + * This method differs from the "excludeNonLatest" property in other methods, + * because in this method the current item should have "latest status" to return the other item, + * whereas with "excludeNonLatest" the other item should have "latest status" to be returned. + * + * This method is used to index items in solr; when searching for related items of one of the returned uuids, + * the given item should appear as a search result. + * + * NOTE: This method does not return {@link Relationship}s for performance, because doing so would eagerly fetch + * the items on both sides, which is unnecessary. + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type. + * @param context the DSpace context. + * @param latestItem the target item; only relationships where this item has "latest status" should be considered. + * @param relationshipType the relationship type for which relationships should be selected. + * @param isLeft whether the entity type of the item occurs on the left or right side of the relationship type. + * This is redundant in most cases, but necessary because relationship types my have + * the same entity type on both sides. + * @return a list containing pairs of relationship ids and item uuids. + * @throws SQLException if something goes wrong. + */ + public List findByLatestItemAndRelationshipType( + Context context, Item latestItem, RelationshipType relationshipType, boolean isLeft + ) throws SQLException; /** * This method will update the given item's metadata order. 
@@ -174,6 +272,7 @@ public interface RelationshipService extends DSpaceCRUDService { /** * This method returns a list of Relationship objects for which the relationshipType property is equal to the given * RelationshipType object + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type * @param context The relevant DSpace context * @param relationshipType The RelationshipType object that will be used to check the Relationship on * @return The list of Relationship objects for which the given RelationshipType object is equal @@ -185,6 +284,7 @@ public interface RelationshipService extends DSpaceCRUDService { /** * This method returns a list of Relationship objets for which the relationshipType property is equal to the given * RelationshipType object + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type * @param context The relevant DSpace context * @param relationshipType The RelationshipType object that will be used to check the Relationship on * @param limit paging limit @@ -198,6 +298,27 @@ public interface RelationshipService extends DSpaceCRUDService { /** * This method is used to construct a Relationship object with all it's variables + * @param c The relevant DSpace context + * @param leftItem The leftItem Item object for the relationship + * @param rightItem The rightItem Item object for the relationship + * @param relationshipType The RelationshipType object for the relationship + * @param leftPlace The leftPlace integer for the relationship + * @param rightPlace The rightPlace integer for the relationship + * @param leftwardValue The leftwardValue string for the relationship + * @param rightwardValue The rightwardValue string for the relationship + * @param latestVersionStatus The latestVersionStatus value for the relationship + * @return The created Relationship object with the given properties + * @throws AuthorizeException If something goes wrong + * @throws SQLException If something goes wrong 
+ */ + Relationship create( + Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, int leftPlace, int rightPlace, + String leftwardValue, String rightwardValue, LatestVersionStatus latestVersionStatus + ) throws AuthorizeException, SQLException; + + /** + * This method is used to construct a Relationship object with all it's variables, + * except the latest version status * @param c The relevant DSpace context * @param leftItem The leftItem Item object for the relationship * @param rightItem The rightItem Item object for the relationship @@ -210,14 +331,15 @@ public interface RelationshipService extends DSpaceCRUDService { * @throws AuthorizeException If something goes wrong * @throws SQLException If something goes wrong */ - Relationship create(Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, - int leftPlace, int rightPlace, String leftwardValue, String rightwardValue) - throws AuthorizeException, SQLException; + Relationship create( + Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, int leftPlace, int rightPlace, + String leftwardValue, String rightwardValue + ) throws AuthorizeException, SQLException; /** * This method is used to construct a Relationship object with all it's variables, - * except the leftward and rightward labels + * except the leftward label, rightward label and latest version status * @param c The relevant DSpace context * @param leftItem The leftItem Item object for the relationship * @param rightItem The rightItem Item object for the relationship @@ -267,7 +389,7 @@ public interface RelationshipService extends DSpaceCRUDService { /** * Count total number of relationships (rows in relationship table) by a relationship type - * + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type * @param context context * @param relationshipType relationship type to filter by * @return total count @@ -287,10 +409,25 @@ public interface 
RelationshipService extends DSpaceCRUDService { */ int countByItem(Context context, Item item) throws SQLException; + /** + * This method returns a count of Relationship objects that have the given Item object + * as a leftItem or a rightItem + * @param context The relevant DSpace context + * @param item The item that should be either a leftItem or a rightItem of all + * the Relationship objects in the returned list + * @param excludeTilted if true, excludes tilted relationships + * @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version + * that is relevant + * @return The list of Relationship objects that contain either a left or a + * right item that is equal to the given item + * @throws SQLException If something goes wrong + */ + int countByItem(Context context, Item item, boolean excludeTilted, boolean excludeNonLatest) throws SQLException; + /** * Count total number of relationships (rows in relationship table) by a relationship type and a boolean indicating * whether the relationship should contain the item on the left side or not - * + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type * @param context context * @param relationshipType relationship type to filter by * @param isLeft Indicating whether the counted Relationships should have the given Item on the left side or not @@ -300,6 +437,22 @@ public interface RelationshipService extends DSpaceCRUDService { int countByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, boolean isLeft) throws SQLException; + /** + * Count total number of relationships (rows in relationship table) by a relationship type and a boolean indicating + * whether the relationship should contain the item on the left side or not + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type + * @param context context + * @param relationshipType relationship type to filter by + * @param 
isLeft Indicating whether the counted Relationships should have the given Item on the left side + * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version + * that is relevant for this relationship + * @return total count with the given parameters + * @throws SQLException if database error + */ + int countByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, boolean isLeft, boolean excludeNonLatest + ) throws SQLException; + /** * Count total number of relationships (rows in relationship table) * by a relationship leftward or rightward typeName diff --git a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java index 34a04056ce..32ad747d76 100644 --- a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java +++ b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java @@ -155,12 +155,11 @@ public abstract class AbstractHibernateDAO implements GenericDAO { * @return A list of distinct results as depicted by the CriteriaQuery and parameters * @throws SQLException */ - public List list(Context context, CriteriaQuery criteriaQuery, boolean cacheable, Class clazz, int maxResults, - int offset) throws SQLException { + public List list( + Context context, CriteriaQuery criteriaQuery, boolean cacheable, Class clazz, int maxResults, int offset + ) throws SQLException { criteriaQuery.distinct(true); - @SuppressWarnings("unchecked") - List result = (List) executeCriteriaQuery(context, criteriaQuery, cacheable, maxResults, offset); - return result; + return executeCriteriaQuery(context, criteriaQuery, cacheable, maxResults, offset); } /** @@ -183,12 +182,12 @@ public abstract class AbstractHibernateDAO implements GenericDAO { * @return A list of results determined by the CriteriaQuery and parameters * @throws SQLException */ - public List list(Context context, CriteriaQuery 
criteriaQuery, boolean cacheable, Class clazz, int maxResults, - int offset, boolean distinct) throws SQLException { + public List list( + Context context, CriteriaQuery criteriaQuery, boolean cacheable, Class clazz, int maxResults, int offset, + boolean distinct + ) throws SQLException { criteriaQuery.distinct(distinct); - @SuppressWarnings("unchecked") - List result = (List) executeCriteriaQuery(context, criteriaQuery, cacheable, maxResults, offset); - return result; + return executeCriteriaQuery(context, criteriaQuery, cacheable, maxResults, offset); } /** diff --git a/dspace-api/src/main/java/org/dspace/core/Context.java b/dspace-api/src/main/java/org/dspace/core/Context.java index f5edcf2eb0..82b39dd2df 100644 --- a/dspace-api/src/main/java/org/dspace/core/Context.java +++ b/dspace-api/src/main/java/org/dspace/core/Context.java @@ -10,6 +10,7 @@ package org.dspace.core; import java.sql.SQLException; import java.util.ArrayList; import java.util.Deque; +import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Locale; @@ -91,12 +92,12 @@ public class Context implements AutoCloseable { /** * Group IDs of special groups user is a member of */ - private List specialGroups; + private Set specialGroups; /** * Temporary store for the specialGroups when the current user is temporary switched */ - private List specialGroupsPreviousState; + private Set specialGroupsPreviousState; /** * The currently used authentication method @@ -183,7 +184,7 @@ public class Context implements AutoCloseable { extraLogInfo = ""; ignoreAuth = false; - specialGroups = new ArrayList<>(); + specialGroups = new HashSet<>(); authStateChangeHistory = new ConcurrentLinkedDeque<>(); authStateClassCallHistory = new ConcurrentLinkedDeque<>(); @@ -537,6 +538,36 @@ public class Context implements AutoCloseable { } } + /** + * Rollback the current transaction with the database, without persisting any + * pending changes. 
The database connection is not closed and can be reused + * afterwards. + * + * WARNING: After calling this method all previously fetched entities are + * "detached" (pending changes are not tracked anymore). You have to reload all + * entities you still want to work with manually after this method call (see + * {@link Context#reloadEntity(ReloadableEntity)}). + * + * @throws SQLException When rollbacking the transaction in the database fails. + */ + public void rollback() throws SQLException { + // If Context is no longer open/valid, just note that it has already been closed + if (!isValid()) { + log.info("rollback() was called on a closed Context object. No changes to abort."); + return; + } + + try { + // Rollback ONLY if we have a database transaction, and it is NOT Read Only + if (!isReadOnly() && isTransactionAlive()) { + dbConnection.rollback(); + reloadContextBoundEntities(); + } + } finally { + events = null; + } + } + /** * Close the context, without committing any of the changes performed using * this context. The database connection is freed. No exception is thrown if @@ -656,6 +687,15 @@ public class Context implements AutoCloseable { return myGroups; } + /** + * Get a set of all of the special groups uuids that current user is a member of. + * + * @return list of special groups uuids + */ + public Set getSpecialGroupUuids() { + return CollectionUtils.isEmpty(specialGroups) ? 
Set.of() : specialGroups; + } + /** * Temporary change the user bound to the context, empty the special groups that * are retained to allow subsequent restore @@ -673,12 +713,12 @@ public class Context implements AutoCloseable { currentUserPreviousState = currentUser; specialGroupsPreviousState = specialGroups; - specialGroups = new ArrayList<>(); + specialGroups = new HashSet<>(); currentUser = newUser; } /** - * Restore the user bound to the context and his special groups + * Restore the user bound to the context and their special groups * * @throws IllegalStateException if no switch was performed before */ @@ -904,4 +944,11 @@ public class Context implements AutoCloseable { public void setAuthenticationMethod(final String authenticationMethod) { this.authenticationMethod = authenticationMethod; } + + /** + * Check if the user of the context is switched. + */ + public boolean isContextUserSwitched() { + return currentUserPreviousState != null; + } } diff --git a/dspace-api/src/main/java/org/dspace/core/Email.java b/dspace-api/src/main/java/org/dspace/core/Email.java index c0d191caf5..64da629bcc 100644 --- a/dspace-api/src/main/java/org/dspace/core/Email.java +++ b/dspace-api/src/main/java/org/dspace/core/Email.java @@ -152,14 +152,14 @@ public class Email { private static final String RESOURCE_REPOSITORY_NAME = "Email"; private static final Properties VELOCITY_PROPERTIES = new Properties(); static { - VELOCITY_PROPERTIES.put(Velocity.RESOURCE_LOADER, "string"); - VELOCITY_PROPERTIES.put("string.resource.loader.description", + VELOCITY_PROPERTIES.put(Velocity.RESOURCE_LOADERS, "string"); + VELOCITY_PROPERTIES.put("resource.loader.string.description", "Velocity StringResource loader"); - VELOCITY_PROPERTIES.put("string.resource.loader.class", + VELOCITY_PROPERTIES.put("resource.loader.string.class", StringResourceLoader.class.getName()); - VELOCITY_PROPERTIES.put("string.resource.loader.repository.name", + 
VELOCITY_PROPERTIES.put("resource.loader.string.repository.name", RESOURCE_REPOSITORY_NAME); - VELOCITY_PROPERTIES.put("string.resource.loader.repository.static", + VELOCITY_PROPERTIES.put("resource.loader.string.repository.static", "false"); } @@ -355,7 +355,7 @@ public class Email { for (String headerName : config.getArrayProperty("mail.message.headers")) { String headerValue = (String) vctx.get(headerName); if ("subject".equalsIgnoreCase(headerName)) { - if (null != subject) { + if (null != headerValue) { subject = headerValue; } } else if ("charset".equalsIgnoreCase(headerName)) { diff --git a/dspace-api/src/main/java/org/dspace/core/I18nUtil.java b/dspace-api/src/main/java/org/dspace/core/I18nUtil.java index a853c3597e..0fc48b908b 100644 --- a/dspace-api/src/main/java/org/dspace/core/I18nUtil.java +++ b/dspace-api/src/main/java/org/dspace/core/I18nUtil.java @@ -346,7 +346,7 @@ public class I18nUtil { } } - if (fileNameL != null && !fileFound) { + if (!fileFound) { File fileTmp = new File(fileNameL + fileType); if (fileTmp.exists()) { fileFound = true; diff --git a/dspace-api/src/main/java/org/dspace/core/LegacyPluginServiceImpl.java b/dspace-api/src/main/java/org/dspace/core/LegacyPluginServiceImpl.java index 7bbbd91d0a..e92ea137f3 100644 --- a/dspace-api/src/main/java/org/dspace/core/LegacyPluginServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/core/LegacyPluginServiceImpl.java @@ -10,7 +10,6 @@ package org.dspace.core; import java.io.BufferedReader; import java.io.FileReader; import java.io.IOException; -import java.io.Serializable; import java.lang.reflect.Array; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; @@ -173,7 +172,7 @@ public class LegacyPluginServiceImpl implements PluginService { throws PluginInstantiationException { // cache of config data for Sequence Plugins; format its // -> [ .. 
] (value is Array) - Map sequenceConfig = new HashMap(); + Map sequenceConfig = new HashMap<>(); // cache the configuration for this interface after grovelling it once: // format is prefix. = @@ -220,10 +219,7 @@ public class LegacyPluginServiceImpl implements PluginService { // Map of named plugin classes, [intfc,name] -> class // Also contains intfc -> "marker" to mark when interface has been loaded. - private Map namedPluginClasses = new HashMap(); - - // Map of cached (reusable) named plugin instances, [class,name] -> instance - private Map namedInstanceCache = new HashMap(); + private final Map namedPluginClasses = new HashMap<>(); // load and cache configuration data for the given interface. private void configureNamedPlugin(String iname) @@ -413,14 +409,14 @@ public class LegacyPluginServiceImpl implements PluginService { String iname = interfaceClass.getName(); configureNamedPlugin(iname); String prefix = iname + SEP; - ArrayList result = new ArrayList(); + ArrayList result = new ArrayList<>(); for (String key : namedPluginClasses.keySet()) { if (key.startsWith(prefix)) { result.add(key.substring(prefix.length())); } } - if (result.size() == 0) { + if (result.isEmpty()) { log.error("Cannot find any names for named plugin, interface=" + iname); } @@ -508,10 +504,10 @@ public class LegacyPluginServiceImpl implements PluginService { */ // tables of config keys for each type of config line: - Map singleKey = new HashMap(); - Map sequenceKey = new HashMap(); - Map namedKey = new HashMap(); - Map selfnamedKey = new HashMap(); + Map singleKey = new HashMap<>(); + Map sequenceKey = new HashMap<>(); + Map namedKey = new HashMap<>(); + Map selfnamedKey = new HashMap<>(); // Find all property keys starting with "plugin." List keys = configurationService.getPropertyKeys("plugin."); @@ -533,7 +529,7 @@ public class LegacyPluginServiceImpl implements PluginService { // 2. Build up list of all interfaces and test that they are loadable. 
// don't bother testing that they are "interface" rather than "class" // since either one will work for the Plugin Manager. - ArrayList allInterfaces = new ArrayList(); + ArrayList allInterfaces = new ArrayList<>(); allInterfaces.addAll(singleKey.keySet()); allInterfaces.addAll(sequenceKey.keySet()); allInterfaces.addAll(namedKey.keySet()); @@ -547,7 +543,6 @@ public class LegacyPluginServiceImpl implements PluginService { // - each class is loadable. // - plugin.selfnamed values are each subclass of SelfNamedPlugin // - save classname in allImpls - Map allImpls = new HashMap(); // single plugins - just check that it has a valid impl. class ii = singleKey.keySet().iterator(); @@ -558,9 +553,6 @@ public class LegacyPluginServiceImpl implements PluginService { log.error("Single plugin config not found for: " + SINGLE_PREFIX + key); } else { val = val.trim(); - if (checkClassname(val, "implementation class")) { - allImpls.put(val, val); - } } } @@ -571,12 +563,6 @@ public class LegacyPluginServiceImpl implements PluginService { String[] vals = configurationService.getArrayProperty(SEQUENCE_PREFIX + key); if (vals == null || vals.length == 0) { log.error("Sequence plugin config not found for: " + SEQUENCE_PREFIX + key); - } else { - for (String val : vals) { - if (checkClassname(val, "implementation class")) { - allImpls.put(val, val); - } - } } } @@ -591,7 +577,6 @@ public class LegacyPluginServiceImpl implements PluginService { } else { for (String val : vals) { if (checkClassname(val, "selfnamed implementation class")) { - allImpls.put(val, val); checkSelfNamed(val); } } @@ -609,15 +594,6 @@ public class LegacyPluginServiceImpl implements PluginService { log.error("Named plugin config not found for: " + NAMED_PREFIX + key); } else { checkNames(key); - for (String val : vals) { - // each named plugin has two parts to the value, format: - // [classname] = [plugin-name] - String val_split[] = val.split("\\s*=\\s*"); - String classname = val_split[0]; - if 
(checkClassname(classname, "implementation class")) { - allImpls.put(classname, classname); - } - } } } } diff --git a/dspace-api/src/main/java/org/dspace/curate/CitationPage.java b/dspace-api/src/main/java/org/dspace/ctask/general/CitationPage.java similarity index 80% rename from dspace-api/src/main/java/org/dspace/curate/CitationPage.java rename to dspace-api/src/main/java/org/dspace/ctask/general/CitationPage.java index dbdd070145..fa630029b8 100644 --- a/dspace-api/src/main/java/org/dspace/curate/CitationPage.java +++ b/dspace-api/src/main/java/org/dspace/ctask/general/CitationPage.java @@ -5,8 +5,9 @@ * * http://www.dspace.org/license/ */ -package org.dspace.curate; +package org.dspace.ctask.general; +import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.sql.SQLException; @@ -17,6 +18,9 @@ import java.util.Map; import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import org.dspace.content.DSpaceObject; @@ -25,6 +29,10 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.BitstreamService; import org.dspace.content.service.BundleService; import org.dspace.core.Context; +import org.dspace.curate.AbstractCurationTask; +import org.dspace.curate.Curator; +import org.dspace.curate.Distributive; +import org.dspace.curate.Mutative; import org.dspace.disseminate.factory.DisseminateServiceFactory; import org.dspace.disseminate.service.CitationDocumentService; @@ -66,6 +74,10 @@ public class CitationPage extends AbstractCurationTask { protected BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); protected BundleService bundleService = 
ContentServiceFactory.getInstance().getBundleService(); + protected ResourcePolicyService resourcePolicyService = AuthorizeServiceFactory.getInstance() + .getResourcePolicyService(); + + private Map displayMap = new HashMap(); /** * {@inheritDoc} @@ -94,13 +106,17 @@ public class CitationPage extends AbstractCurationTask { protected void performItem(Item item) throws SQLException { //Determine if the DISPLAY bundle exits. If not, create it. List dBundles = itemService.getBundles(item, CitationPage.DISPLAY_BUNDLE_NAME); + Bundle original = itemService.getBundles(item, "ORIGINAL").get(0); Bundle dBundle = null; if (dBundles == null || dBundles.isEmpty()) { try { dBundle = bundleService.create(Curator.curationContext(), item, CitationPage.DISPLAY_BUNDLE_NAME); + // don't inherit now otherwise they will be copied over the moved bitstreams + resourcePolicyService.removeAllPolicies(Curator.curationContext(), dBundle); } catch (AuthorizeException e) { - log.error("User not authroized to create bundle on item \"" - + item.getName() + "\": " + e.getMessage()); + log.error("User not authroized to create bundle on item \"{}\": {}", + item::getName, e::getMessage); + return; } } else { dBundle = dBundles.get(0); @@ -108,7 +124,6 @@ public class CitationPage extends AbstractCurationTask { //Create a map of the bitstreams in the displayBundle. This is used to //check if the bundle being cited is already in the display bundle. 
- Map displayMap = new HashMap<>(); for (Bitstream bs : dBundle.getBitstreams()) { displayMap.put(bs.getName(), bs); } @@ -119,13 +134,15 @@ public class CitationPage extends AbstractCurationTask { List pBundles = itemService.getBundles(item, CitationPage.PRESERVATION_BUNDLE_NAME); Bundle pBundle = null; List bundles = new ArrayList<>(); - if (pBundles != null && pBundles.size() > 0) { + if (pBundles != null && !pBundles.isEmpty()) { pBundle = pBundles.get(0); bundles.addAll(itemService.getBundles(item, "ORIGINAL")); bundles.addAll(pBundles); } else { try { pBundle = bundleService.create(Curator.curationContext(), item, CitationPage.PRESERVATION_BUNDLE_NAME); + // don't inherit now otherwise they will be copied over the moved bitstreams + resourcePolicyService.removeAllPolicies(Curator.curationContext(), pBundle); } catch (AuthorizeException e) { log.error("User not authroized to create bundle on item \"" + item.getName() + "\": " + e.getMessage()); @@ -154,10 +171,14 @@ public class CitationPage extends AbstractCurationTask { try { //Create the cited document InputStream citedInputStream = - citationDocument.makeCitedDocument(Curator.curationContext(), bitstream).getLeft(); + new ByteArrayInputStream( + citationDocument.makeCitedDocument(Curator.curationContext(), bitstream).getLeft()); //Add the cited document to the approiate bundle this.addCitedPageToItem(citedInputStream, bundle, pBundle, - dBundle, displayMap, item, bitstream); + dBundle, item, bitstream); + // now set the policies of the preservation and display bundle + clonePolicies(Curator.curationContext(), original, pBundle); + clonePolicies(Curator.curationContext(), original, dBundle); } catch (Exception e) { //Could be many things, but nothing that should be //expected. @@ -200,8 +221,6 @@ public class CitationPage extends AbstractCurationTask { * @param pBundle The preservation bundle. The original document should be * put in here if it is not already. * @param dBundle The display bundle. 
The cited document gets put in here. - * @param displayMap The map of bitstream names to bitstreams in the display - * bundle. * @param item The item containing the bundles being used. * @param bitstream The original source bitstream. * @throws SQLException if database error @@ -209,7 +228,7 @@ public class CitationPage extends AbstractCurationTask { * @throws IOException if IO error */ protected void addCitedPageToItem(InputStream citedDoc, Bundle bundle, Bundle pBundle, - Bundle dBundle, Map displayMap, Item item, + Bundle dBundle, Item item, Bitstream bitstream) throws SQLException, AuthorizeException, IOException { //If we are modifying a file that is not in the //preservation bundle then we have to move it there. @@ -237,7 +256,8 @@ public class CitationPage extends AbstractCurationTask { citedBitstream.setName(context, bitstream.getName()); bitstreamService.setFormat(context, citedBitstream, bitstream.getFormat(Curator.curationContext())); citedBitstream.setDescription(context, bitstream.getDescription()); - + displayMap.put(bitstream.getName(), citedBitstream); + clonePolicies(context, bitstream, citedBitstream); this.resBuilder.append(" Added ") .append(citedBitstream.getName()) .append(" to the ") @@ -249,4 +269,16 @@ public class CitationPage extends AbstractCurationTask { itemService.update(context, item); this.status = Curator.CURATE_SUCCESS; } + + private void clonePolicies(Context context, DSpaceObject source,DSpaceObject target) + throws SQLException, AuthorizeException { + resourcePolicyService.removeAllPolicies(context, target); + for (ResourcePolicy rp: source.getResourcePolicies()) { + ResourcePolicy newPolicy = resourcePolicyService.clone(context, rp); + newPolicy.setdSpaceObject(target); + newPolicy.setAction(rp.getAction()); + resourcePolicyService.update(context, newPolicy); + } + + } } diff --git a/dspace-api/src/main/java/org/dspace/ctask/general/CreateMissingIdentifiers.java 
b/dspace-api/src/main/java/org/dspace/ctask/general/CreateMissingIdentifiers.java new file mode 100644 index 0000000000..9639461426 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/ctask/general/CreateMissingIdentifiers.java @@ -0,0 +1,94 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.ctask.general; + +import java.io.IOException; +import java.sql.SQLException; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.curate.AbstractCurationTask; +import org.dspace.curate.Curator; +import org.dspace.identifier.IdentifierException; +import org.dspace.identifier.IdentifierProvider; +import org.dspace.identifier.VersionedHandleIdentifierProviderWithCanonicalHandles; +import org.dspace.identifier.factory.IdentifierServiceFactory; +import org.dspace.identifier.service.IdentifierService; +import org.dspace.services.factory.DSpaceServicesFactory; + +/** + * Ensure that an object has all of the identifiers that it should, minting them + * as necessary. + * + * @author Mark H. Wood {@literal } + */ +public class CreateMissingIdentifiers + extends AbstractCurationTask { + private static final Logger LOG = LogManager.getLogger(); + + @Override + public int perform(DSpaceObject dso) + throws IOException { + // Only some kinds of model objects get identifiers + if (!(dso instanceof Item)) { + return Curator.CURATE_SKIP; + } + + // XXX Temporary escape when an incompatible provider is configured. + // XXX Remove this when the provider is fixed. 
+ boolean compatible = DSpaceServicesFactory + .getInstance() + .getServiceManager() + .getServiceByName( + VersionedHandleIdentifierProviderWithCanonicalHandles.class.getCanonicalName(), + IdentifierProvider.class) == null; + if (!compatible) { + setResult("This task is not compatible with VersionedHandleIdentifierProviderWithCanonicalHandles"); + return Curator.CURATE_ERROR; + } + // XXX End of escape + + String typeText = Constants.typeText[dso.getType()]; + + // Get a Context + Context context; + try { + context = Curator.curationContext(); + } catch (SQLException ex) { + report("Could not get the curation Context: " + ex.getMessage()); + return Curator.CURATE_ERROR; + } + + // Find the IdentifierService implementation + IdentifierService identifierService = IdentifierServiceFactory + .getInstance() + .getIdentifierService(); + + // Register any missing identifiers. + try { + identifierService.register(context, dso); + } catch (AuthorizeException | IdentifierException | SQLException ex) { + String message = ex.getMessage(); + report(String.format("Identifier(s) not minted for %s %s: %s%n", + typeText, dso.getID().toString(), message)); + LOG.error("Identifier(s) not minted: {}", message); + return Curator.CURATE_ERROR; + } + + // Success! 
+ report(String.format("%s %s registered.%n", + typeText, dso.getID().toString())); + return Curator.CURATE_SUCCESS; + } +} diff --git a/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java b/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java index edeb2a6d02..f7ab18c01e 100644 --- a/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java +++ b/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java @@ -10,11 +10,13 @@ package org.dspace.ctask.general; import java.io.IOException; import java.io.InputStream; import java.sql.SQLException; +import java.util.ArrayDeque; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Queue; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.xml.XMLConstants; @@ -33,6 +35,7 @@ import org.apache.http.HttpStatus; import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.content.DSpaceObject; @@ -60,18 +63,18 @@ import org.xml.sax.SAXException; * Intended use: cataloging tool in workflow and general curation. * The task uses a URL 'template' to compose the service call, e.g. * - * {@code http://www.sherpa.ac.uk/romeo/api29.php?issn=\{dc.identifier.issn\}} + *

    {@code http://www.sherpa.ac.uk/romeo/api29.php?issn=\{dc.identifier.issn\}} * - * Task will substitute the value of the passed item's metadata field + *

    Task will substitute the value of the passed item's metadata field * in the {parameter} position. If multiple values are present in the * item field, the first value is used. * - * The task uses another property (the datamap) to determine what data + *

    The task uses another property (the datamap) to determine what data * to extract from the service response and how to use it, e.g. * - * {@code //publisher/name=>dc.publisher,//romeocolour} + *

    {@code //publisher/name=>dc.publisher,//romeocolour} * - * Task will evaluate the left-hand side (or entire token) of each + *

    Task will evaluate the left-hand side (or entire token) of each * comma-separated token in the property as an XPath 1.0 expression into * the response document, and if there is a mapping symbol (e.g. {@code '=>'}) and * value, it will assign the response document value(s) to the named @@ -79,48 +82,52 @@ import org.xml.sax.SAXException; * multiple values, they will all be assigned to the item field. The * mapping symbol governs the nature of metadata field assignment: * - * {@code '->'} mapping will add to any existing values in the item field - * {@code '=>'} mapping will replace any existing values in the item field - * {@code '~>'} mapping will add *only* if item field has no existing values + *

      + *
    • {@code '->'} mapping will add to any existing values in the item field
    • + *
    • {@code '=>'} mapping will replace any existing values in the item field
    • + *
    • {@code '~>'} mapping will add *only* if item field has no existing values
    • + *
    * - * Unmapped data (without a mapping symbol) will simply be added to the task + *

    Unmapped data (without a mapping symbol) will simply be added to the task * result string, prepended by the XPath expression (a little prettified). * Each label/value pair in the result string is separated by a space, * unless the optional 'separator' property is defined. * - * A very rudimentary facility for transformation of data is supported, e.g. + *

    A very rudimentary facility for transformation of data is supported, e.g. * - * {@code http://www.crossref.org/openurl/?id=\{doi:dc.relation.isversionof\}&format=unixref} + *

    {@code http://www.crossref.org/openurl/?id=\{doi:dc.relation.isversionof\}&format=unixref} * - * The 'doi:' prefix will cause the task to look for a 'transform' with that + *

    The 'doi:' prefix will cause the task to look for a 'transform' with that * name, which is applied to the metadata value before parameter substitution * occurs. Transforms are defined in a task property such as the following: * - * {@code transform.doi = match 10. trunc 60} + *

    {@code transform.doi = match 10. trunc 60} * - * This means exclude the value string up to the occurrence of '10.', then + *

    This means exclude the value string up to the occurrence of '10.', then * truncate after 60 characters. The only transform functions currently defined: * - * {@code 'cut' } = remove number leading characters - * {@code 'trunc' } = remove trailing characters after number length - * {@code 'match' } = start match at pattern - * {@code 'text' } = append literal characters (enclose in ' ' when whitespace needed) + *

      + *
    • {@code 'cut' } = remove number leading characters
    • + *
    • {@code 'trunc' } = remove trailing characters after number length
    • + *
    • {@code 'match' } = start match at pattern
    • + *
    • {@code 'text' } = append literal characters (enclose in ' ' when whitespace needed)
    • + *
    * - * If the transform results in an invalid state (e.g. cutting more characters + *

    If the transform results in an invalid state (e.g. cutting more characters * than are in the value), the condition will be logged and the * un-transformed value used. * - * Transforms may also be used in datamaps, e.g. + *

    Transforms may also be used in datamaps, e.g. * - * {@code //publisher/name=>shorten:dc.publisher,//romeocolour} + *

    {@code //publisher/name=>shorten:dc.publisher,//romeocolour} * - * which would apply the 'shorten' transform to the service response value(s) + *

    which would apply the 'shorten' transform to the service response value(s) * prior to metadata field assignment. * - * An optional property 'headers' may be defined to stipulate any HTTP headers + *

    An optional property 'headers' may be defined to stipulate any HTTP headers * required in the service call. The property syntax is double-pipe separated headers: * - * {@code Accept: text/xml||Cache-Control: no-cache} + *

    {@code Accept: text/xml||Cache-Control: no-cache} * * @author richardrodgers */ @@ -128,9 +135,9 @@ import org.xml.sax.SAXException; @Suspendable public class MetadataWebService extends AbstractCurationTask implements NamespaceContext { /** - * log4j category + * logging category */ - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(MetadataWebService.class); + private static final Logger log = LogManager.getLogger(); // transform token parsing pattern protected Pattern ttPattern = Pattern.compile("\'([^\']*)\'|(\\S+)"); // URL of web service with template parameters @@ -360,42 +367,45 @@ public class MetadataWebService extends AbstractCurationTask implements Namespac if (transDef == null) { return value; } - String[] tokens = tokenize(transDef); + Queue tokens = tokenize(transDef); String retValue = value; - for (int i = 0; i < tokens.length; i += 2) { - if ("cut".equals(tokens[i]) || "trunc".equals(tokens[i])) { - int index = Integer.parseInt(tokens[i + 1]); + while (!tokens.isEmpty()) { + String function = tokens.poll(); + if ("cut".equals(function) || "trunc".equals(function)) { + String argument = tokens.poll(); + int index = Integer.parseInt(argument); if (retValue.length() > index) { - if ("cut".equals(tokens[i])) { + if ("cut".equals(function)) { retValue = retValue.substring(index); } else { retValue = retValue.substring(0, index); } - } else if ("cut".equals(tokens[i])) { - log.error("requested cut: " + index + " exceeds value length"); + } else if ("cut".equals(function)) { + log.error("requested cut: {} exceeds value length", index); return value; } - } else if ("match".equals(tokens[i])) { - int index2 = retValue.indexOf(tokens[i + 1]); + } else if ("match".equals(function)) { + String argument = tokens.poll(); + int index2 = retValue.indexOf(argument); if (index2 > 0) { retValue = retValue.substring(index2); } else { - log.error("requested match: " + tokens[i + 1] + " failed"); + log.error("requested match: {} 
failed", argument); return value; } - } else if ("text".equals(tokens[i])) { - retValue = retValue + tokens[i + 1]; + } else if ("text".equals(function)) { + retValue = retValue + tokens.poll(); } else { - log.error(" unknown transform operation: " + tokens[i]); + log.error(" unknown transform operation: " + function); return value; } } return retValue; } - protected String[] tokenize(String text) { - List list = new ArrayList<>(); + protected Queue tokenize(String text) { Matcher m = ttPattern.matcher(text); + Queue list = new ArrayDeque<>(m.groupCount()); while (m.find()) { if (m.group(1) != null) { list.add(m.group(1)); @@ -403,7 +413,7 @@ public class MetadataWebService extends AbstractCurationTask implements Namespac list.add(m.group(2)); } } - return list.toArray(new String[0]); + return list; } protected int getMapIndex(String mapping) { diff --git a/dspace-api/src/main/java/org/dspace/curate/Curation.java b/dspace-api/src/main/java/org/dspace/curate/Curation.java index a01c731189..b3af072a32 100644 --- a/dspace-api/src/main/java/org/dspace/curate/Curation.java +++ b/dspace-api/src/main/java/org/dspace/curate/Curation.java @@ -189,7 +189,7 @@ public class Curation extends DSpaceRunnable { * @throws FileNotFoundException If file of command line variable -r reporter is not found */ private Curator initCurator() throws FileNotFoundException { - Curator curator = new Curator(); + Curator curator = new Curator(handler); OutputStream reporterStream; if (null == this.reporter) { reporterStream = new NullOutputStream(); @@ -259,12 +259,19 @@ public class Curation extends DSpaceRunnable { super.handler.logError("EPerson not found: " + currentUserUuid); throw new IllegalArgumentException("Unable to find a user with uuid: " + currentUserUuid); } + assignSpecialGroupsInContext(); this.context.setCurrentUser(eperson); } catch (SQLException e) { handler.handleException("Something went wrong trying to fetch eperson for uuid: " + currentUserUuid, e); } } + protected void 
assignSpecialGroupsInContext() throws SQLException { + for (UUID uuid : handler.getSpecialGroups()) { + context.setSpecialGroup(uuid); + } + } + /** * Fills in some optional command line options. * Checks if there are missing required options or invalid values for options. diff --git a/dspace-api/src/main/java/org/dspace/curate/CurationCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/curate/CurationCliScriptConfiguration.java index 5e1d014873..eaa04f4778 100644 --- a/dspace-api/src/main/java/org/dspace/curate/CurationCliScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/curate/CurationCliScriptConfiguration.java @@ -19,7 +19,6 @@ public class CurationCliScriptConfiguration extends CurationScriptConfiguration< public Options getOptions() { options = super.getOptions(); options.addOption("e", "eperson", true, "email address of curating eperson"); - options.getOption("e").setType(String.class); options.getOption("e").setRequired(true); return options; } diff --git a/dspace-api/src/main/java/org/dspace/curate/Curator.java b/dspace-api/src/main/java/org/dspace/curate/Curator.java index aa6cb14fda..4076fab519 100644 --- a/dspace-api/src/main/java/org/dspace/curate/Curator.java +++ b/dspace-api/src/main/java/org/dspace/curate/Curator.java @@ -9,6 +9,7 @@ package org.dspace.curate; import java.io.IOException; import java.sql.SQLException; +import java.text.MessageFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; @@ -30,6 +31,7 @@ import org.dspace.core.Context; import org.dspace.core.factory.CoreServiceFactory; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.handle.service.HandleService; +import org.dspace.scripts.handler.DSpaceRunnableHandler; /** * Curator orchestrates and manages the application of a one or more curation @@ -90,6 +92,17 @@ public class Curator { protected CommunityService communityService; protected ItemService itemService; protected HandleService 
handleService; + protected DSpaceRunnableHandler handler; + + /** + * constructor that uses a handler for logging + * + * @param handler {@code DSpaceRunnableHandler} used to log information + */ + public Curator(DSpaceRunnableHandler handler) { + this(); + this.handler = handler; + } /** * No-arg constructor @@ -338,7 +351,7 @@ public class Curator { */ public void report(String message) { if (null == reporter) { - log.warn("report called with no Reporter set: {}", message); + logWarning("report called with no Reporter set: {}", message); return; } @@ -435,7 +448,7 @@ public class Curator { // Site-wide Tasks really should have an EPerson performer associated with them, // otherwise they are run as an "anonymous" user with limited access rights. if (ctx.getCurrentUser() == null && !ctx.ignoreAuthorization()) { - log.warn("You are running one or more Site-Wide curation tasks in ANONYMOUS USER mode," + + logWarning("You are running one or more Site-Wide curation tasks in ANONYMOUS USER mode," + " as there is no EPerson 'performer' associated with this task. To associate an EPerson " + "'performer' " + " you should ensure tasks are called via the Curator.curate(Context, ID) method."); @@ -546,7 +559,7 @@ public class Curator { } statusCode = task.perform(dso); String id = (dso.getHandle() != null) ?
dso.getHandle() : "workflow item: " + dso.getID(); - log.info(logMessage(id)); + logInfo(logMessage(id)); visit(dso); return !suspend(statusCode); } catch (IOException ioe) { @@ -562,7 +575,7 @@ public class Curator { throw new IOException("Context or identifier is null"); } statusCode = task.perform(c, id); - log.info(logMessage(id)); + logInfo(logMessage(id)); visit(null); return !suspend(statusCode); } catch (IOException ioe) { @@ -604,5 +617,51 @@ public class Curator { } return mb.toString(); } + + /** + * Proxy method for logging with INFO level + * + * @param message that needs to be logged + */ + protected void logInfo(String message) { + if (handler == null) { + log.info(message); + } else { + handler.logInfo(message); + } + } + + } + + /** + * Proxy method for logging with WARN level + * + * @param message that needs to be logged + */ + protected void logWarning(String message) { + logWarning(message, null); + } + + /** + * Proxy method for logging with WARN level and a {@code MessageFormat} + * that generates the final log.
+ * + * @param message Target message to format or print + * @param object Object to use inside the message, or null + */ + protected void logWarning(String message, Object object) { + if (handler == null) { + if (object != null) { + log.warn(message, object); + } else { + log.warn(message); + } + } else { + if (object != null) { + handler.logWarning(MessageFormat.format(message, object)); + } else { + handler.logWarning(message); + } + } } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexClientOptions.java b/dspace-api/src/main/java/org/dspace/discovery/IndexClientOptions.java index 62357bd95f..74d9ba0c3a 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexClientOptions.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexClientOptions.java @@ -74,25 +74,17 @@ public enum IndexClientOptions { options .addOption("r", "remove", true, "remove an Item, Collection or Community from index based on its handle"); - options.getOption("r").setType(String.class); options.addOption("i", "index", true, "add or update an Item, Collection or Community based on its handle or uuid"); - options.getOption("i").setType(boolean.class); options.addOption("c", "clean", false, "clean existing index removing any documents that no longer exist in the db"); - options.getOption("c").setType(boolean.class); options.addOption("d", "delete", false, "delete all records from existing index"); - options.getOption("d").setType(boolean.class); options.addOption("b", "build", false, "(re)build index, wiping out current one if it exists"); - options.getOption("b").setType(boolean.class); options.addOption("s", "spellchecker", false, "Rebuild the spellchecker, can be combined with -b and -f."); - options.getOption("s").setType(boolean.class); options.addOption("f", "force", false, "if updating existing index, force each handle to be reindexed even if uptodate"); - options.getOption("f").setType(boolean.class); options.addOption("h", "help", false, "print this help 
message"); - options.getOption("h").setType(boolean.class); return options; } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java b/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java index 5f1f8b0b0e..4ff1f31344 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java @@ -7,6 +7,7 @@ */ package org.dspace.discovery; +import java.sql.SQLException; import java.util.HashSet; import java.util.Optional; import java.util.Set; @@ -37,6 +38,8 @@ public class IndexEventConsumer implements Consumer { // collect Items, Collections, Communities that need indexing private Set objectsToUpdate = new HashSet<>(); + // collect freshly created Items that need indexing (requires pre-db status) + private Set createdItemsToUpdate = new HashSet<>(); // unique search IDs to delete private Set uniqueIdsToDelete = new HashSet<>(); @@ -65,6 +68,7 @@ public class IndexEventConsumer implements Consumer { if (objectsToUpdate == null) { objectsToUpdate = new HashSet<>(); uniqueIdsToDelete = new HashSet<>(); + createdItemsToUpdate = new HashSet<>(); } int st = event.getSubjectType(); @@ -143,6 +147,7 @@ public class IndexEventConsumer implements Consumer { String detail = indexableObjectService.getType() + "-" + event.getSubjectID().toString(); uniqueIdsToDelete.add(detail); } + objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, subject)); } break; @@ -162,7 +167,7 @@ public class IndexEventConsumer implements Consumer { // also update the object in order to index mapped/unmapped Items if (subject != null && subject.getType() == Constants.COLLECTION && object.getType() == Constants.ITEM) { - objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, object)); + createdItemsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, object)); } } break; @@ -209,23 +214,11 @@ public class 
IndexEventConsumer implements Consumer { } // update the changed Items not deleted because they were on create list for (IndexableObject iu : objectsToUpdate) { - /* we let all types through here and - * allow the search indexer to make - * decisions on indexing and/or removal - */ - iu.setIndexedObject(ctx.reloadEntity(iu.getIndexedObject())); - String uniqueIndexID = iu.getUniqueIndexID(); - if (uniqueIndexID != null) { - try { - indexer.indexContent(ctx, iu, true, false); - log.debug("Indexed " - + iu.getTypeText() - + ", id=" + iu.getID() - + ", unique_id=" + uniqueIndexID); - } catch (Exception e) { - log.error("Failed while indexing object: ", e); - } - } + indexObject(ctx, iu, false); + } + // update the created Items with a pre-db status + for (IndexableObject iu : createdItemsToUpdate) { + indexObject(ctx, iu, true); } } finally { if (!objectsToUpdate.isEmpty() || !uniqueIdsToDelete.isEmpty()) { @@ -235,6 +228,27 @@ public class IndexEventConsumer implements Consumer { // "free" the resources objectsToUpdate.clear(); uniqueIdsToDelete.clear(); + createdItemsToUpdate.clear(); + } + } + } + + private void indexObject(Context ctx, IndexableObject iu, boolean preDb) throws SQLException { + /* we let all types through here and + * allow the search indexer to make + * decisions on indexing and/or removal + */ + iu.setIndexedObject(ctx.reloadEntity(iu.getIndexedObject())); + String uniqueIndexID = iu.getUniqueIndexID(); + if (uniqueIndexID != null) { + try { + indexer.indexContent(ctx, iu, true, false, preDb); + log.debug("Indexed " + + iu.getTypeText() + + ", id=" + iu.getID() + + ", unique_id=" + uniqueIndexID); + } catch (Exception e) { + log.error("Failed while indexing object: ", e); } } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexingService.java b/dspace-api/src/main/java/org/dspace/discovery/IndexingService.java index db0329dd67..2ef5affa47 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexingService.java +++ 
b/dspace-api/src/main/java/org/dspace/discovery/IndexingService.java @@ -9,7 +9,9 @@ package org.dspace.discovery; import java.io.IOException; import java.sql.SQLException; +import java.util.Map; +import org.apache.solr.client.solrj.SolrServerException; import org.dspace.core.Context; /** @@ -30,6 +32,17 @@ public interface IndexingService { void indexContent(Context context, IndexableObject dso, boolean force, boolean commit) throws SQLException, SearchServiceException; + /** + * Index a given DSO + * @param context The DSpace Context + * @param dso The DSpace Object to index + * @param force Force update even if not stale + * @param commit Commit the changes + * @param preDb Add a "preDB" status to the index (only applicable to Items) + */ + void indexContent(Context context, IndexableObject dso, + boolean force, boolean commit, boolean preDb) throws SQLException, SearchServiceException; + void unIndexContent(Context context, IndexableObject dso) throws SQLException, IOException; @@ -62,4 +75,15 @@ public interface IndexingService { void optimize() throws SearchServiceException; void buildSpellCheck() throws SearchServiceException, IOException; + + /** + * Atomically update the index of a single field for an object + * @param context The DSpace context + * @param uniqueIndexId The unique index ID of the object to update the index for + * @param field The field to update + * @param fieldModifier The modifiers for the field to update.
More information on how to atomically update a solr + * field using a field modifier can be found here: https://yonik.com/solr/atomic-updates/ + */ + void atomicUpdate(Context context, String uniqueIndexId, String field, Map fieldModifier) + throws SolrServerException, IOException; } diff --git a/dspace-api/src/main/java/org/dspace/discovery/SearchService.java b/dspace-api/src/main/java/org/dspace/discovery/SearchService.java index 9b6ac0109d..cb945648e7 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SearchService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SearchService.java @@ -8,6 +8,7 @@ package org.dspace.discovery; import java.sql.SQLException; +import java.util.Iterator; import java.util.List; import org.dspace.content.Item; @@ -38,6 +39,7 @@ public interface SearchService { DiscoverResult search(Context context, DiscoverQuery query) throws SearchServiceException; + /** * Convenient method to call @see #search(Context, DSpaceObject, * DiscoverQuery, boolean) with includeWithdrawn=false @@ -52,9 +54,22 @@ public interface SearchService { DiscoverResult search(Context context, IndexableObject dso, DiscoverQuery query) throws SearchServiceException; + /** + * Convenience method to call @see #search(Context, DSpaceObject, DiscoverQuery) and getting an iterator for the + * results + * + * @param context DSpace context object + * @param dso a DSpace object to use as a scope of the search + * @param query the discovery query object + * @return an iterator iterating over all results from the search + * @throws SearchServiceException if search error + */ + Iterator iteratorSearch(Context context, IndexableObject dso, DiscoverQuery query) + throws SearchServiceException; + List search(Context context, String query, String orderfield, boolean ascending, int offset, - int max, String... filterquery); + int max, String... 
filterquery); /** * Transforms the given string field and value into a filter query diff --git a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java index 90afb09eca..f14ca124f4 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java @@ -20,6 +20,7 @@ import org.dspace.content.Item; import org.dspace.content.WorkspaceItem; import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfigurationService; +import org.dspace.discovery.utils.DiscoverQueryBuilder; import org.dspace.kernel.ServiceManager; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.workflow.WorkflowItem; @@ -51,6 +52,9 @@ public class SearchUtils { */ private SearchUtils() { } + /** + * Return an instance of the {@link SearchService}. + */ public static SearchService getSearchService() { if (searchService == null) { org.dspace.kernel.ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager(); @@ -59,6 +63,16 @@ public class SearchUtils { return searchService; } + /** + * Clear the cached {@link SearchService} instance, forcing it to be retrieved from the service manager again + * next time {@link SearchUtils#getSearchService} is called. + * In practice, this is only necessary for integration tests in some environments + * where the cached version may no longer be up to date between tests. 
+ */ + public static void clearCachedSearchService() { + searchService = null; + } + public static DiscoveryConfiguration getDiscoveryConfiguration() { return getDiscoveryConfiguration(null, null); } @@ -170,4 +184,10 @@ public class SearchUtils { DiscoveryConfiguration configurationExtra = getDiscoveryConfigurationByName(confName); result.add(configurationExtra); } + + public static DiscoverQueryBuilder getQueryBuilder() { + ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager(); + return manager + .getServiceByName(DiscoverQueryBuilder.class.getName(), DiscoverQueryBuilder.class); + } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrSearchCore.java b/dspace-api/src/main/java/org/dspace/discovery/SolrSearchCore.java index b430a0c973..f31feab612 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrSearchCore.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrSearchCore.java @@ -21,7 +21,6 @@ import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.service.impl.HttpConnectionPoolService; import org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.storage.rdbms.DatabaseUtils; import org.springframework.beans.factory.annotation.Autowired; @@ -75,14 +74,13 @@ public class SolrSearchCore { */ protected void initSolr() { if (solr == null) { - String solrService = DSpaceServicesFactory.getInstance().getConfigurationService() - .getProperty("discovery.search.server"); + String solrService = configurationService.getProperty("discovery.search.server"); UrlValidator urlValidator = new UrlValidator(UrlValidator.ALLOW_LOCAL_URLS); if (urlValidator.isValid(solrService) || configurationService .getBooleanProperty("discovery.solr.url.validation.enabled", true)) { try { - log.debug("Solr URL: " + solrService); + log.debug("Solr URL: {}", solrService); HttpSolrClient solrServer = new 
HttpSolrClient.Builder(solrService) .withHttpClient(httpConnectionPoolService.getClient()) .build(); @@ -103,10 +101,13 @@ public class SolrSearchCore { solr = solrServer; } catch (SolrServerException | IOException e) { - log.error("Error while initializing solr server", e); + log.error("Error while initializing solr server {}", + solrService, e); + throw new RuntimeException("Failed to contact Solr at " + solrService + + " : " + e.getMessage()); } } else { - log.error("Error while initializing solr, invalid url: " + solrService); + log.error("Error while initializing solr, invalid url: {}", solrService); } } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java index f894553e5d..383121072d 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java @@ -8,6 +8,8 @@ package org.dspace.discovery; import static java.util.stream.Collectors.joining; +import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD; +import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD_PREDB; import java.io.IOException; import java.io.PrintWriter; @@ -118,8 +120,6 @@ public class SolrServiceImpl implements SearchService, IndexingService { } - - /** * If the handle for the "dso" already exists in the index, and the "dso" * has a lastModified timestamp that is newer than the document in the index @@ -166,6 +166,24 @@ public class SolrServiceImpl implements SearchService, IndexingService { indexableObjectService.writeDocument(context, indexableObject, solrInputDocument); } + /** + * Update the given indexable object using a given service + * @param context The DSpace Context + * @param indexableObjectService The service to index the object with + * @param indexableObject The object to index + * @param preDB Add a "preDB" status to the document + */ + protected 
void update(Context context, IndexFactory indexableObjectService, IndexableObject indexableObject, + boolean preDB) throws IOException, SQLException, SolrServerException { + if (preDB) { + final SolrInputDocument solrInputDocument = + indexableObjectService.buildNewDocument(context, indexableObject); + indexableObjectService.writeDocument(context, indexableObject, solrInputDocument); + } else { + update(context, indexableObjectService, indexableObject); + } + } + /** * unIndex removes an Item, Collection, or Community * @@ -454,6 +472,16 @@ public class SolrServiceImpl implements SearchService, IndexingService { } } + @Override + public void atomicUpdate(Context context, String uniqueIndexId, String field, Map fieldModifier) + throws SolrServerException, IOException { + SolrInputDocument solrInputDocument = new SolrInputDocument(); + solrInputDocument.addField(SearchUtils.RESOURCE_UNIQUE_ID, uniqueIndexId); + solrInputDocument.addField(field, fieldModifier); + + solrSearchCore.getSolr().add(solrInputDocument); + } + // ////////////////////////////////// // Private // ////////////////////////////////// @@ -710,16 +738,21 @@ public class SolrServiceImpl implements SearchService, IndexingService { discoveryQuery.addFilterQueries("location:l" + dso.getID()); } else if (dso instanceof IndexableItem) { discoveryQuery.addFilterQueries(SearchUtils.RESOURCE_UNIQUE_ID + ":" + dso. 
- getUniqueIndexID()); + getUniqueIndexID()); } } return search(context, discoveryQuery); } + @Override + public Iterator iteratorSearch(Context context, IndexableObject dso, DiscoverQuery query) + throws SearchServiceException { + return new SearchIterator(context, dso, query); + } @Override - public DiscoverResult search(Context context, DiscoverQuery discoveryQuery ) + public DiscoverResult search(Context context, DiscoverQuery discoveryQuery) throws SearchServiceException { try { if (solrSearchCore.getSolr() == null) { @@ -733,6 +766,72 @@ public class SolrServiceImpl implements SearchService, IndexingService { } } + /** + * This class implements an iterator over items that is specifically used to iterate over search results + */ + private class SearchIterator implements Iterator { + private Context context; + private DiscoverQuery discoverQuery; + private DiscoverResult discoverResult; + private IndexableObject dso; + private int absoluteCursor; + private int relativeCursor; + private int pagesize; + + SearchIterator(Context context, DiscoverQuery discoverQuery) throws SearchServiceException { + this.context = context; + this.discoverQuery = discoverQuery; + this.absoluteCursor = discoverQuery.getStart(); + initialise(); + } + + SearchIterator(Context context, IndexableObject dso, DiscoverQuery discoverQuery) + throws SearchServiceException { + this.context = context; + this.dso = dso; + this.discoverQuery = discoverQuery; + initialise(); + } + + private void initialise() throws SearchServiceException { + this.relativeCursor = 0; + if (discoverQuery.getMaxResults() != -1) { + pagesize = discoverQuery.getMaxResults(); + } else { + pagesize = 10; + } + discoverQuery.setMaxResults(pagesize); + this.discoverResult = search(context, dso, discoverQuery); + } + + @Override + public boolean hasNext() { + return absoluteCursor < discoverResult.getTotalSearchResults(); + } + + @Override + public Item next() { + //paginate getting results from the discoverquery. 
+ if (relativeCursor == pagesize) { + // get a new page of results when the last element of the previous page has been read + int offset = absoluteCursor; + // reset the position counter for getting element relativecursor on a page + relativeCursor = 0; + discoverQuery.setStart(offset); + try { + discoverResult = search(context, dso, discoverQuery); + } catch (SearchServiceException e) { + log.error("error while getting search results", e); + } + } + // get the element at position relativecursor on a page + IndexableObject res = discoverResult.getIndexableObjects().get(relativeCursor); + relativeCursor++; + absoluteCursor++; + return (Item) res.getIndexedObject(); + } + } + protected SolrQuery resolveToSolrQuery(Context context, DiscoverQuery discoveryQuery) throws SearchServiceException { SolrQuery solrQuery = new SolrQuery(); @@ -753,6 +852,7 @@ public class SolrServiceImpl implements SearchService, IndexingService { solrQuery.addField(SearchUtils.RESOURCE_TYPE_FIELD); solrQuery.addField(SearchUtils.RESOURCE_ID_FIELD); solrQuery.addField(SearchUtils.RESOURCE_UNIQUE_ID); + solrQuery.addField(STATUS_FIELD); if (discoveryQuery.isSpellCheck()) { solrQuery.setParam(SpellingParams.SPELLCHECK_Q, query); @@ -879,7 +979,7 @@ public class SolrServiceImpl implements SearchService, IndexingService { // if we found stale objects we can decide to skip execution of the remaining code to improve performance boolean skipLoadingResponse = false; // use zombieDocs to collect stale found objects - List zombieDocs = new ArrayList(); + List zombieDocs = new ArrayList<>(); QueryResponse solrQueryResponse = solrSearchCore.getSolr().query(solrQuery, solrSearchCore.REQUEST_METHOD); if (solrQueryResponse != null) { @@ -903,11 +1003,14 @@ public class SolrServiceImpl implements SearchService, IndexingService { // Enables solr to remove documents related to items not on database anymore (Stale) // if maxAttemps is greater than 0 cleanup the index on each step if (maxAttempts >= 0) { - 
zombieDocs.add((String) doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID)); - // avoid to process the response except if we are in the last allowed execution. - // When maxAttempts is 0 this will be just the first and last run as the - // executionCount is increased at the start of the loop it will be equals to 1 - skipLoadingResponse = maxAttempts + 1 != executionCount; + Object statusObj = doc.getFirstValue(STATUS_FIELD); + if (!(statusObj instanceof String && statusObj.equals(STATUS_FIELD_PREDB))) { + zombieDocs.add((String) doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID)); + // avoid to process the response except if we are in the last allowed execution. + // When maxAttempts is 0 this will be just the first and last run as the + // executionCount is increased at the start of the loop it will be equals to 1 + skipLoadingResponse = maxAttempts + 1 != executionCount; + } } continue; } @@ -930,12 +1033,6 @@ public class SolrServiceImpl implements SearchService, IndexingService { //We need to remove all the "_hl" appendix strings from our keys Map> resultMap = new HashMap<>(); for (String key : highlightedFields.keySet()) { - List highlightOriginalValue = highlightedFields.get(key); - List resultHighlightOriginalValue = new ArrayList<>(); - for (String highlightValue : highlightOriginalValue) { - String[] splitted = highlightValue.split("###"); - resultHighlightOriginalValue.add(splitted); - } resultMap.put(key.substring(0, key.lastIndexOf("_hl")), highlightedFields.get(key)); } @@ -951,7 +1048,7 @@ public class SolrServiceImpl implements SearchService, IndexingService { // If any stale entries are found in the current page of results, // we remove those stale entries and rerun the same query again. // Otherwise, the query is valid and the results are returned. 
- if (zombieDocs.size() != 0) { + if (!zombieDocs.isEmpty()) { log.info("Cleaning " + zombieDocs.size() + " stale objects from Discovery Index"); log.info("ZombieDocs "); zombieDocs.forEach(log::info); @@ -1174,7 +1271,7 @@ public class SolrServiceImpl implements SearchService, IndexingService { //DO NOT ESCAPE RANGE QUERIES ! if (!value.matches("\\[.*TO.*\\]")) { value = ClientUtils.escapeQueryChars(value); - filterQuery.append("(").append(value).append(")"); + filterQuery.append("\"").append(value).append("\""); } else { filterQuery.append(value); } @@ -1389,6 +1486,28 @@ public class SolrServiceImpl implements SearchService, IndexingService { } } + @Override + public void indexContent(Context context, IndexableObject indexableObject, boolean force, + boolean commit, boolean preDb) throws SearchServiceException, SQLException { + if (preDb) { + try { + final IndexFactory indexableObjectFactory = indexObjectServiceFactory. + getIndexableObjectFactory(indexableObject); + if (force || requiresIndexing(indexableObject.getUniqueIndexID(), indexableObject.getLastModified())) { + update(context, indexableObjectFactory, indexableObject, true); + log.info(LogHelper.getHeader(context, "indexed_object", indexableObject.getUniqueIndexID())); + } + } catch (IOException | SQLException | SolrServerException | SearchServiceException e) { + log.error(e.getMessage(), e); + } + } else { + indexContent(context, indexableObject, force); + } + if (commit) { + commit(); + } + } + @Override public void commit() throws SearchServiceException { try { @@ -1442,4 +1561,5 @@ public class SolrServiceImpl implements SearchService, IndexingService { } return null; } + } diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexFactoryImpl.java index 8660bbebc7..55c99b168e 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexFactoryImpl.java +++ 
b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexFactoryImpl.java @@ -70,10 +70,20 @@ public abstract class IndexFactoryImpl implements return doc; } + @Override + public SolrInputDocument buildNewDocument(Context context, T indexableObject) throws SQLException, IOException { + return buildDocument(context, indexableObject); + } + @Override public void writeDocument(Context context, T indexableObject, SolrInputDocument solrInputDocument) throws SQLException, IOException, SolrServerException { - writeDocument(solrInputDocument, null); + try { + writeDocument(solrInputDocument, null); + } catch (Exception e) { + log.error("Error occurred while writing SOLR document for {} object {}", + indexableObject.getType(), indexableObject.getID(), e); + } } /** @@ -95,7 +105,6 @@ public abstract class IndexFactoryImpl implements 100000); // Use Tika's Text parser as the streams are always from the TEXT bundle (i.e. already extracted text) - // TODO: We may wish to consider using Tika to extract the text in the future. TextAndCSVParser tikaParser = new TextAndCSVParser(); BodyContentHandler tikaHandler = new BodyContentHandler(charLimit); Metadata tikaMetadata = new Metadata(); @@ -114,9 +123,11 @@ public abstract class IndexFactoryImpl implements log.info("Full text is larger than the configured limit (discovery.solr.fulltext.charLimit)." + " Only the first {} characters were indexed.", charLimit); } else { + log.error("Tika parsing error. Could not index full text.", saxe); throw new IOException("Tika parsing error. Could not index full text.", saxe); } } catch (TikaException ex) { + log.error("Tika parsing error. Could not index full text.", ex); throw new IOException("Tika parsing error. 
Could not index full text.", ex); } diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java index e9f18ae949..15a74b45d1 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java @@ -10,7 +10,6 @@ package org.dspace.discovery.indexobject; import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.HashSet; @@ -43,7 +42,6 @@ import org.dspace.content.service.WorkspaceItemService; import org.dspace.core.Context; import org.dspace.core.LogHelper; import org.dspace.discovery.FullTextContentStreams; -import org.dspace.discovery.IndexableObject; import org.dspace.discovery.SearchUtils; import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfigurationParameters; @@ -64,6 +62,9 @@ import org.dspace.handle.service.HandleService; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.util.MultiFormatDateParser; import org.dspace.util.SolrUtils; +import org.dspace.versioning.Version; +import org.dspace.versioning.VersionHistory; +import org.dspace.versioning.service.VersionHistoryService; import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; import org.dspace.xmlworkflow.storedcomponents.service.XmlWorkflowItemService; import org.springframework.beans.factory.annotation.Autowired; @@ -78,6 +79,8 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl findAll(Context context) throws SQLException { - Iterator items = itemService.findAllUnfiltered(context); + Iterator items = itemService.findAllRegularItems(context); return new Iterator() { @Override public boolean hasNext() { @@ -139,6 +144,7 @@ 
public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl discoveryConfigurations) @@ -713,26 +764,31 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl results = new ArrayList<>(); - if (object.isArchived() || object.isWithdrawn()) { - // We only want to index an item as an item if it is not in workflow - results.addAll(Arrays.asList(new IndexableItem(object))); - } else { - // Check if we have a workflow / workspace item - final WorkspaceItem workspaceItem = workspaceItemService.findByItem(context, object); - if (workspaceItem != null) { - results.addAll(workspaceItemIndexFactory.getIndexableObjects(context, workspaceItem)); - } else { - // Check if we a workflow item - final XmlWorkflowItem xmlWorkflowItem = xmlWorkflowItemService.findByItem(context, object); - if (xmlWorkflowItem != null) { - results.addAll(workflowItemIndexFactory.getIndexableObjects(context, xmlWorkflowItem)); - } - } + public List getIndexableObjects(Context context, Item item) throws SQLException { + if (item.isArchived() || item.isWithdrawn()) { + // we only want to index an item as an item if it is not in workflow + return List.of(new IndexableItem(item)); } - return results; + final WorkspaceItem workspaceItem = workspaceItemService.findByItem(context, item); + if (workspaceItem != null) { + // a workspace item is linked to the given item + return List.copyOf(workspaceItemIndexFactory.getIndexableObjects(context, workspaceItem)); + } + + final XmlWorkflowItem xmlWorkflowItem = xmlWorkflowItemService.findByItem(context, item); + if (xmlWorkflowItem != null) { + // a workflow item is linked to the given item + return List.copyOf(workflowItemIndexFactory.getIndexableObjects(context, xmlWorkflowItem)); + } + + if (!isLatestVersion(context, item)) { + // the given item is an older version of another item + return List.of(new IndexableItem(item)); + } + + // nothing to index + return List.of(); } @Override diff --git 
a/dspace-api/src/main/java/org/dspace/discovery/indexobject/factory/IndexFactory.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/factory/IndexFactory.java index 6644da248d..7946311796 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/factory/IndexFactory.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/factory/IndexFactory.java @@ -46,6 +46,14 @@ public interface IndexFactory { */ SolrInputDocument buildDocument(Context context, T indexableObject) throws SQLException, IOException; + /** + * Create solr document with all the shared fields initialized. + * Can contain special fields required for "new" documents vs regular buildDocument + * @param indexableObject the indexableObject that we want to index + * @return initialized solr document + */ + SolrInputDocument buildNewDocument(Context context, T indexableObject) throws SQLException, IOException; + /** * Write the provided document to the solr core * @param context DSpace context object diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DiscoverQueryBuilder.java b/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java similarity index 58% rename from dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DiscoverQueryBuilder.java rename to dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java index add7cb45ed..fa5cc32813 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DiscoverQueryBuilder.java +++ b/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java @@ -5,7 +5,7 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.utils; +package org.dspace.discovery.utils; import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; @@ -19,10 +19,6 @@ import java.util.Objects; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import 
org.apache.logging.log4j.Logger; -import org.dspace.app.rest.converter.query.SearchQueryConverter; -import org.dspace.app.rest.exception.DSpaceBadRequestException; -import org.dspace.app.rest.exception.InvalidSearchRequestException; -import org.dspace.app.rest.parameter.SearchFilter; import org.dspace.core.Context; import org.dspace.core.LogHelper; import org.dspace.discovery.DiscoverFacetField; @@ -32,6 +28,7 @@ import org.dspace.discovery.DiscoverQuery; import org.dspace.discovery.FacetYearRange; import org.dspace.discovery.IndexableObject; import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfigurationParameters; import org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration; @@ -40,17 +37,11 @@ import org.dspace.discovery.configuration.DiscoverySearchFilterFacet; import org.dspace.discovery.configuration.DiscoverySortConfiguration; import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration; import org.dspace.discovery.indexobject.factory.IndexFactory; +import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter; import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.data.domain.Pageable; -import org.springframework.data.domain.Sort; -import org.springframework.stereotype.Component; -/** - * This class builds the queries for the /search and /facet endpoints. 
- */ -@Component public class DiscoverQueryBuilder implements InitializingBean { private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(DiscoverQueryBuilder.class); @@ -74,51 +65,60 @@ public class DiscoverQueryBuilder implements InitializingBean { /** * Build a discovery query * - * @param context the DSpace context - * @param scope the scope for this discovery query - * @param discoveryConfiguration the discovery configuration for this discovery query - * @param query the query string for this discovery query - * @param searchFilters the search filters for this discovery query - * @param dsoType only include search results with this type - * @param page the pageable for this discovery query + * @param context the DSpace context + * @param scope the scope for this discovery query + * @param discoveryConfiguration the discovery configuration for this discovery query + * @param query the query string for this discovery query + * @param searchFilters the search filters for this discovery query + * @param dsoType only include search results with this type + * @param pageSize the page size for this discovery query + * @param offset the offset for this discovery query + * @param sortProperty the sort property for this discovery query + * @param sortDirection the sort direction for this discovery query */ public DiscoverQuery buildQuery(Context context, IndexableObject scope, DiscoveryConfiguration discoveryConfiguration, - String query, List searchFilters, - String dsoType, Pageable page) - throws DSpaceBadRequestException { + String query, List searchFilters, + String dsoType, Integer pageSize, Long offset, String sortProperty, + String sortDirection) throws SearchServiceException { List dsoTypes = dsoType != null ? 
singletonList(dsoType) : emptyList(); - return buildQuery(context, scope, discoveryConfiguration, query, searchFilters, dsoTypes, page); + return buildQuery(context, scope, discoveryConfiguration, query, searchFilters, dsoTypes, pageSize, offset, + sortProperty, sortDirection); } + /** * Build a discovery query * - * @param context the DSpace context - * @param scope the scope for this discovery query - * @param discoveryConfiguration the discovery configuration for this discovery query - * @param query the query string for this discovery query - * @param searchFilters the search filters for this discovery query - * @param dsoTypes only include search results with one of these types - * @param page the pageable for this discovery query + * @param context the DSpace context + * @param scope the scope for this discovery query + * @param discoveryConfiguration the discovery configuration for this discovery query + * @param query the query string for this discovery query + * @param searchFilters the search filters for this discovery query + * @param dsoTypes only include search results with one of these types + * @param pageSize the page size for this discovery query + * @param offset the offset for this discovery query + * @param sortProperty the sort property for this discovery query + * @param sortDirection the sort direction for this discovery query */ public DiscoverQuery buildQuery(Context context, IndexableObject scope, DiscoveryConfiguration discoveryConfiguration, - String query, List searchFilters, - List dsoTypes, Pageable page) - throws DSpaceBadRequestException { + String query, List searchFilters, + List dsoTypes, Integer pageSize, Long offset, String sortProperty, + String sortDirection) + throws IllegalArgumentException, SearchServiceException { DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters, - dsoTypes); + dsoTypes); //When all search criteria are set, configure facet results 
addFaceting(context, scope, queryArgs, discoveryConfiguration); //Configure pagination and sorting - configurePagination(page, queryArgs); - configureSorting(page, queryArgs, discoveryConfiguration.getSearchSortConfiguration()); + configurePagination(pageSize, offset, queryArgs); + configureSorting(sortProperty, sortDirection, queryArgs, discoveryConfiguration.getSearchSortConfiguration()); addDiscoveryHitHighlightFields(discoveryConfiguration, queryArgs); return queryArgs; @@ -128,11 +128,11 @@ public class DiscoverQueryBuilder implements InitializingBean { DiscoverQuery queryArgs) { if (discoveryConfiguration.getHitHighlightingConfiguration() != null) { List metadataFields = discoveryConfiguration - .getHitHighlightingConfiguration().getMetadataFields(); + .getHitHighlightingConfiguration().getMetadataFields(); for (DiscoveryHitHighlightFieldConfiguration fieldConfiguration : metadataFields) { queryArgs.addHitHighlightingField( - new DiscoverHitHighlightingField(fieldConfiguration.getField(), fieldConfiguration.getMaxSize(), - fieldConfiguration.getSnippets())); + new DiscoverHitHighlightingField(fieldConfiguration.getField(), fieldConfiguration.getMaxSize(), + fieldConfiguration.getSnippets())); } } } @@ -140,92 +140,97 @@ public class DiscoverQueryBuilder implements InitializingBean { /** * Create a discovery facet query. * - * @param context the DSpace context - * @param scope the scope for this discovery query - * @param discoveryConfiguration the discovery configuration for this discovery query - * @param prefix limit the facets results to those starting with the given prefix. 
- * @param query the query string for this discovery query - * @param searchFilters the search filters for this discovery query - * @param dsoType only include search results with this type - * @param page the pageable for this discovery query - * @param facetName the facet field + * @param context the DSpace context + * @param scope the scope for this discovery query + * @param discoveryConfiguration the discovery configuration for this discovery query + * @param prefix limit the facets results to those starting with the given prefix. + * @param query the query string for this discovery query + * @param searchFilters the search filters for this discovery query + * @param dsoType only include search results with this type + * @param pageSize the page size for this discovery query + * @param offset the offset for this discovery query + * @param facetName the facet field */ public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope, DiscoveryConfiguration discoveryConfiguration, - String prefix, String query, List searchFilters, - String dsoType, Pageable page, String facetName) - throws DSpaceBadRequestException { + String prefix, String query, List searchFilters, + String dsoType, Integer pageSize, Long offset, String facetName) + throws IllegalArgumentException { List dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList(); return buildFacetQuery( - context, scope, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, page, facetName); + context, scope, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, pageSize, offset, + facetName); } /** * Create a discovery facet query. * - * @param context the DSpace context - * @param scope the scope for this discovery query - * @param discoveryConfiguration the discovery configuration for this discovery query - * @param prefix limit the facets results to those starting with the given prefix. 
- * @param query the query string for this discovery query - * @param searchFilters the search filters for this discovery query - * @param dsoTypes only include search results with one of these types - * @param page the pageable for this discovery query - * @param facetName the facet field + * @param context the DSpace context + * @param scope the scope for this discovery query + * @param discoveryConfiguration the discovery configuration for this discovery query + * @param prefix limit the facets results to those starting with the given prefix. + * @param query the query string for this discovery query + * @param searchFilters the search filters for this discovery query + * @param dsoTypes only include search results with one of these types + * @param pageSize the page size for this discovery query + * @param offset the offset for this discovery query + * @param facetName the facet field */ public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope, DiscoveryConfiguration discoveryConfiguration, - String prefix, String query, List searchFilters, - List dsoTypes, Pageable page, String facetName) - throws DSpaceBadRequestException { + String prefix, String query, List searchFilters, + List dsoTypes, Integer pageSize, Long offset, String facetName) + throws IllegalArgumentException { DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters, dsoTypes); //When all search criteria are set, configure facet results - addFacetingForFacets(context, scope, prefix, queryArgs, discoveryConfiguration, facetName, page); + addFacetingForFacets(context, scope, prefix, queryArgs, discoveryConfiguration, facetName, pageSize); //We don' want any search results, we only want facet values queryArgs.setMaxResults(0); //Configure pagination - configurePaginationForFacets(page, queryArgs); + configurePaginationForFacets(offset, queryArgs); return queryArgs; } - private void configurePaginationForFacets(Pageable page, 
DiscoverQuery queryArgs) { - if (page != null) { - queryArgs.setFacetOffset(Math.toIntExact(page.getOffset())); + private void configurePaginationForFacets(Long offset, DiscoverQuery queryArgs) { + if (offset != null) { + queryArgs.setFacetOffset(Math.toIntExact(offset)); } } private DiscoverQuery addFacetingForFacets(Context context, IndexableObject scope, String prefix, - DiscoverQuery queryArgs, DiscoveryConfiguration discoveryConfiguration, String facetName, Pageable page) - throws DSpaceBadRequestException { + DiscoverQuery queryArgs, DiscoveryConfiguration discoveryConfiguration, + String facetName, Integer pageSize) + throws IllegalArgumentException { DiscoverySearchFilterFacet facet = discoveryConfiguration.getSidebarFacet(facetName); if (facet != null) { queryArgs.setFacetMinCount(1); - int pageSize = Math.min(pageSizeLimit, page.getPageSize()); + + pageSize = pageSize != null ? Math.min(pageSizeLimit, pageSize) : pageSizeLimit; fillFacetIntoQueryArgs(context, scope, prefix, queryArgs, facet, pageSize); } else { - throw new DSpaceBadRequestException(facetName + " is not a valid search facet"); + throw new IllegalArgumentException(facetName + " is not a valid search facet"); } return queryArgs; } private void fillFacetIntoQueryArgs(Context context, IndexableObject scope, String prefix, - DiscoverQuery queryArgs, DiscoverySearchFilterFacet facet, final int pageSize) { + DiscoverQuery queryArgs, DiscoverySearchFilterFacet facet, final int pageSize) { if (facet.getType().equals(DiscoveryConfigurationParameters.TYPE_DATE)) { try { FacetYearRange facetYearRange = - searchService.getFacetYearRange(context, scope, facet, queryArgs.getFilterQueries(), queryArgs); + searchService.getFacetYearRange(context, scope, facet, queryArgs.getFilterQueries(), queryArgs); queryArgs.addYearRangeFacet(facet, facetYearRange); @@ -240,19 +245,20 @@ public class DiscoverQueryBuilder implements InitializingBean { // "show more" url int facetLimit = pageSize + 1; //This should take 
care of the sorting for us + prefix = StringUtils.isNotBlank(prefix) ? prefix.toLowerCase() : null; queryArgs.addFacetField(new DiscoverFacetField(facet.getIndexFieldName(), facet.getType(), facetLimit, - facet.getSortOrderSidebar(), StringUtils.trimToNull(prefix))); + facet.getSortOrderSidebar(), + StringUtils.trimToNull(prefix))); } } private DiscoverQuery buildCommonDiscoverQuery(Context context, DiscoveryConfiguration discoveryConfiguration, String query, - List searchFilters, List dsoTypes) - throws DSpaceBadRequestException { + List searchFilters, List dsoTypes) + throws IllegalArgumentException { DiscoverQuery queryArgs = buildBaseQueryForConfiguration(discoveryConfiguration); - //Add search filters - queryArgs.addFilterQueries(convertFilters(context, discoveryConfiguration, searchFilters)); + queryArgs.addFilterQueries(convertFiltersToString(context, discoveryConfiguration, searchFilters)); //Set search query if (StringUtils.isNotBlank(query)) { @@ -274,30 +280,17 @@ public class DiscoverQueryBuilder implements InitializingBean { queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId()); queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries() .toArray( - new String[discoveryConfiguration.getDefaultFilterQueries() - .size()])); + new String[discoveryConfiguration + .getDefaultFilterQueries() + .size()])); return queryArgs; } - private void configureSorting(Pageable page, DiscoverQuery queryArgs, - DiscoverySortConfiguration searchSortConfiguration) throws DSpaceBadRequestException { - String sortBy = null; - String sortOrder = null; - - //Read the Pageable object if there is one - if (page != null) { - Sort sort = page.getSort(); - if (sort != null && sort.iterator().hasNext()) { - Sort.Order order = sort.iterator().next(); - sortBy = order.getProperty(); - sortOrder = order.getDirection().name(); - } - } - - if (StringUtils.isNotBlank(sortBy) && !isConfigured(sortBy, searchSortConfiguration)) { - throw new 
InvalidSearchRequestException( - "The field: " + sortBy + "is not configured for the configuration!"); - } + private void configureSorting(String sortProperty, String sortDirection, DiscoverQuery queryArgs, + DiscoverySortConfiguration searchSortConfiguration) + throws IllegalArgumentException, SearchServiceException { + String sortBy = sortProperty; + String sortOrder = sortDirection; //Load defaults if we did not receive values if (sortBy == null) { @@ -307,24 +300,30 @@ public class DiscoverQueryBuilder implements InitializingBean { sortOrder = getDefaultSortDirection(searchSortConfiguration, sortOrder); } + if (StringUtils.isNotBlank(sortBy) && !isConfigured(sortBy, searchSortConfiguration)) { + throw new SearchServiceException( + "The field: " + sortBy + "is not configured for the configuration!"); + } + + //Update Discovery query DiscoverySortFieldConfiguration sortFieldConfiguration = searchSortConfiguration - .getSortFieldConfiguration(sortBy); + .getSortFieldConfiguration(sortBy); if (sortFieldConfiguration != null) { String sortField = searchService - .toSortFieldIndex(sortFieldConfiguration.getMetadataField(), sortFieldConfiguration.getType()); + .toSortFieldIndex(sortFieldConfiguration.getMetadataField(), sortFieldConfiguration.getType()); if ("asc".equalsIgnoreCase(sortOrder)) { queryArgs.setSortField(sortField, DiscoverQuery.SORT_ORDER.asc); } else if ("desc".equalsIgnoreCase(sortOrder)) { queryArgs.setSortField(sortField, DiscoverQuery.SORT_ORDER.desc); } else { - throw new DSpaceBadRequestException(sortOrder + " is not a valid sort order"); + throw new IllegalArgumentException(sortOrder + " is not a valid sort order"); } } else { - throw new DSpaceBadRequestException(sortBy + " is not a valid sort field"); + throw new IllegalArgumentException(sortBy + " is not a valid sort field"); } } @@ -334,7 +333,7 @@ public class DiscoverQueryBuilder implements InitializingBean { private String getDefaultSortDirection(DiscoverySortConfiguration 
searchSortConfiguration, String sortOrder) { if (Objects.nonNull(searchSortConfiguration.getSortFields()) && - !searchSortConfiguration.getSortFields().isEmpty()) { + !searchSortConfiguration.getSortFields().isEmpty()) { sortOrder = searchSortConfiguration.getSortFields().get(0).getDefaultSortOrder().name(); } return sortOrder; @@ -344,7 +343,7 @@ public class DiscoverQueryBuilder implements InitializingBean { String sortBy;// Attempt to find the default one, if none found we use SCORE sortBy = "score"; if (Objects.nonNull(searchSortConfiguration.getSortFields()) && - !searchSortConfiguration.getSortFields().isEmpty()) { + !searchSortConfiguration.getSortFields().isEmpty()) { DiscoverySortFieldConfiguration defaultSort = searchSortConfiguration.getSortFields().get(0); if (StringUtils.isBlank(defaultSort.getMetadataField())) { return sortBy; @@ -354,66 +353,31 @@ public class DiscoverQueryBuilder implements InitializingBean { return sortBy; } - private void configurePagination(Pageable page, DiscoverQuery queryArgs) { - if (page != null) { - queryArgs.setMaxResults(Math.min(pageSizeLimit, page.getPageSize())); - queryArgs.setStart(Math.toIntExact(page.getOffset())); - } else { - queryArgs.setMaxResults(pageSizeLimit); - queryArgs.setStart(0); - } + private void configurePagination(Integer size, Long offset, DiscoverQuery queryArgs) { + queryArgs.setMaxResults(size != null ? Math.min(pageSizeLimit, size) : pageSizeLimit); + queryArgs.setStart(offset != null ? 
Math.toIntExact(offset) : 0); } - private String getDsoType(String dsoType) throws DSpaceBadRequestException { + private String getDsoType(String dsoType) throws IllegalArgumentException { for (IndexFactory indexFactory : indexableFactories) { if (StringUtils.equalsIgnoreCase(indexFactory.getType(), dsoType)) { return indexFactory.getType(); } } - throw new DSpaceBadRequestException(dsoType + " is not a valid DSpace Object type"); + throw new IllegalArgumentException(dsoType + " is not a valid DSpace Object type"); } public void setIndexableFactories(List indexableFactories) { this.indexableFactories = indexableFactories; } - private String[] convertFilters(Context context, DiscoveryConfiguration discoveryConfiguration, - List searchFilters) throws DSpaceBadRequestException { - ArrayList filterQueries = new ArrayList<>(CollectionUtils.size(searchFilters)); - - SearchQueryConverter searchQueryConverter = new SearchQueryConverter(); - List transformedFilters = searchQueryConverter.convert(searchFilters); - try { - for (SearchFilter searchFilter : CollectionUtils.emptyIfNull(transformedFilters)) { - DiscoverySearchFilter filter = discoveryConfiguration.getSearchFilter(searchFilter.getName()); - if (filter == null) { - throw new DSpaceBadRequestException(searchFilter.getName() + " is not a valid search filter"); - } - - DiscoverFilterQuery filterQuery = searchService.toFilterQuery(context, - filter.getIndexFieldName(), - searchFilter.getOperator(), - searchFilter.getValue(), - discoveryConfiguration); - - if (filterQuery != null) { - filterQueries.add(filterQuery.getFilterQuery()); - } - } - } catch (SQLException e) { - throw new DSpaceBadRequestException("There was a problem parsing the search filters.", e); - } - - return filterQueries.toArray(new String[filterQueries.size()]); - } - private DiscoverQuery addFaceting(Context context, IndexableObject scope, DiscoverQuery queryArgs, DiscoveryConfiguration discoveryConfiguration) { List facets = 
discoveryConfiguration.getSidebarFacets(); log.debug("facets for configuration " + discoveryConfiguration.getId() + ": " + (facets != null ? facets - .size() : null)); + .size() : null)); if (facets != null) { queryArgs.setFacetMinCount(1); @@ -427,4 +391,34 @@ public class DiscoverQueryBuilder implements InitializingBean { return queryArgs; } + private String[] convertFiltersToString(Context context, DiscoveryConfiguration discoveryConfiguration, + List searchFilters) + throws IllegalArgumentException { + ArrayList filterQueries = new ArrayList<>(CollectionUtils.size(searchFilters)); + + try { + for (QueryBuilderSearchFilter searchFilter : CollectionUtils.emptyIfNull(searchFilters)) { + DiscoverySearchFilter filter = discoveryConfiguration.getSearchFilter(searchFilter.getName()); + if (filter == null) { + throw new IllegalArgumentException(searchFilter.getName() + " is not a valid search filter"); + } + + DiscoverFilterQuery filterQuery = searchService.toFilterQuery(context, + filter.getIndexFieldName(), + searchFilter.getOperator(), + searchFilter.getValue(), + discoveryConfiguration); + + if (filterQuery != null) { + filterQueries.add(filterQuery.getFilterQuery()); + } + } + } catch (SQLException e) { + throw new IllegalArgumentException("There was a problem parsing the search filters.", e); + } + + return filterQueries.toArray(new String[filterQueries.size()]); + } + + } diff --git a/dspace-api/src/main/java/org/dspace/discovery/utils/parameter/QueryBuilderSearchFilter.java b/dspace-api/src/main/java/org/dspace/discovery/utils/parameter/QueryBuilderSearchFilter.java new file mode 100644 index 0000000000..f1d16070de --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/discovery/utils/parameter/QueryBuilderSearchFilter.java @@ -0,0 +1,70 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ 
+package org.dspace.discovery.utils.parameter; + +import java.util.Objects; + +import org.apache.commons.lang3.StringUtils; + +/** + * Representation for a Discovery search filter + */ +public class QueryBuilderSearchFilter { + + private String name; + private String operator; + private String value; + + public QueryBuilderSearchFilter(final String name, final String operator, final String value) { + this.name = name; + this.operator = operator; + this.value = value; + } + + public String getName() { + return name; + } + + public String getOperator() { + return operator; + } + + public String getValue() { + return value; + } + + public String toString() { + return "QueryBuilderSearchFilter{" + + "name='" + name + '\'' + + ", operator='" + operator + '\'' + + ", value='" + value + '\'' + + '}'; + } + + public boolean equals(Object object) { + if (object instanceof QueryBuilderSearchFilter) { + QueryBuilderSearchFilter obj = (QueryBuilderSearchFilter) object; + + if (!StringUtils.equals(obj.getName(), getName())) { + return false; + } + if (!StringUtils.equals(obj.getOperator(), getOperator())) { + return false; + } + if (!StringUtils.equals(obj.getValue(), getValue())) { + return false; + } + return true; + } + return false; + } + + public int hashCode() { + return Objects.hash(name, operator, value); + } +} diff --git a/dspace-api/src/main/java/org/dspace/disseminate/CitationDocumentServiceImpl.java b/dspace-api/src/main/java/org/dspace/disseminate/CitationDocumentServiceImpl.java index d51a3dfc7f..c20961db75 100644 --- a/dspace-api/src/main/java/org/dspace/disseminate/CitationDocumentServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/disseminate/CitationDocumentServiceImpl.java @@ -8,7 +8,6 @@ package org.dspace.disseminate; import java.awt.Color; -import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; @@ -297,7 +296,7 @@ public class CitationDocumentServiceImpl implements 
CitationDocumentService, Ini } @Override - public Pair makeCitedDocument(Context context, Bitstream bitstream) + public Pair makeCitedDocument(Context context, Bitstream bitstream) throws IOException, SQLException, AuthorizeException { PDDocument document = new PDDocument(); PDDocument sourceDocument = new PDDocument(); @@ -318,7 +317,7 @@ public class CitationDocumentServiceImpl implements CitationDocumentService, Ini document.save(out); byte[] data = out.toByteArray(); - return Pair.of(new ByteArrayInputStream(data), Long.valueOf(data.length)); + return Pair.of(data, Long.valueOf(data.length)); } } finally { diff --git a/dspace-api/src/main/java/org/dspace/disseminate/service/CitationDocumentService.java b/dspace-api/src/main/java/org/dspace/disseminate/service/CitationDocumentService.java index 4a59de3f5f..0566fc525c 100644 --- a/dspace-api/src/main/java/org/dspace/disseminate/service/CitationDocumentService.java +++ b/dspace-api/src/main/java/org/dspace/disseminate/service/CitationDocumentService.java @@ -8,7 +8,6 @@ package org.dspace.disseminate.service; import java.io.IOException; -import java.io.InputStream; import java.sql.SQLException; import org.apache.commons.lang3.tuple.Pair; @@ -84,7 +83,7 @@ public interface CitationDocumentService { * @throws SQLException if database error * @throws AuthorizeException if authorization error */ - public Pair makeCitedDocument(Context context, Bitstream bitstream) + public Pair makeCitedDocument(Context context, Bitstream bitstream) throws IOException, SQLException, AuthorizeException; /** diff --git a/dspace-api/src/main/java/org/dspace/eperson/AccountServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/AccountServiceImpl.java index 25c61f511a..d873d7230c 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/AccountServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/AccountServiceImpl.java @@ -12,7 +12,6 @@ import java.sql.SQLException; import java.util.Locale; import 
javax.mail.MessagingException; -import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; @@ -178,14 +177,6 @@ public class AccountServiceImpl implements AccountService { registrationDataService.deleteByToken(context, token); } - @Override - public boolean verifyPasswordStructure(String password) { - if (StringUtils.length(password) < 6) { - return false; - } - return true; - } - /** * THIS IS AN INTERNAL METHOD. THE SEND PARAMETER ALLOWS IT TO BE USED FOR * TESTING PURPOSES. diff --git a/dspace-api/src/main/java/org/dspace/eperson/CaptchaServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/CaptchaServiceImpl.java new file mode 100644 index 0000000000..0ab66aea5c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/CaptchaServiceImpl.java @@ -0,0 +1,125 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.URI; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.regex.Pattern; +import javax.annotation.PostConstruct; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.http.HttpResponse; +import org.apache.http.NameValuePair; +import org.apache.http.client.HttpClient; +import org.apache.http.client.entity.UrlEncodedFormEntity; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.message.BasicNameValuePair; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.eperson.service.CaptchaService; +import org.dspace.services.ConfigurationService; +import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.util.StringUtils; + +/** + * Basic services implementation for the Captcha. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class CaptchaServiceImpl implements CaptchaService { + + private static final Logger log = LogManager.getLogger(CaptchaServiceImpl.class); + + private static Pattern RESPONSE_PATTERN = Pattern.compile("[A-Za-z0-9_-]+"); + + private CaptchaSettings captchaSettings; + + @Autowired + private ConfigurationService configurationService; + + @PostConstruct + public void init() { + captchaSettings = new CaptchaSettings(); + captchaSettings.setSite(configurationService.getProperty("google.recaptcha.key.site")); + captchaSettings.setSecret(configurationService.getProperty("google.recaptcha.key.secret")); + captchaSettings.setSiteVerify(configurationService.getProperty("google.recaptcha.site-verify")); + captchaSettings.setCaptchaVersion(configurationService.getProperty("google.recaptcha.version", "v2")); + captchaSettings.setThreshold(Float.parseFloat( + configurationService.getProperty("google.recaptcha.key.threshold", "0.5"))); + } + + @Override + public void processResponse(String response, String action) throws InvalidReCaptchaException { + + if (!responseSanityCheck(response)) { + throw new InvalidReCaptchaException("Response contains invalid characters"); + } + + URI verifyUri = URI.create(captchaSettings.getSiteVerify()); + + List params = new ArrayList(3); + params.add(new BasicNameValuePair("secret", captchaSettings.getSecret())); + params.add(new BasicNameValuePair("response", response)); + params.add(new BasicNameValuePair("remoteip", "")); + + HttpPost httpPost = new HttpPost(verifyUri); + try { + httpPost.addHeader("Accept", "application/json"); + httpPost.addHeader("Content-Type", "application/x-www-form-urlencoded"); + httpPost.setEntity(new UrlEncodedFormEntity(params, "UTF-8")); + } catch (UnsupportedEncodingException e) { + 
log.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); + } + + HttpClient httpClient = HttpClientBuilder.create().build(); + HttpResponse httpResponse; + GoogleCaptchaResponse googleResponse; + final ObjectMapper objectMapper = new ObjectMapper(); + try { + httpResponse = httpClient.execute(httpPost); + googleResponse = objectMapper.readValue(httpResponse.getEntity().getContent(), GoogleCaptchaResponse.class); + } catch (IOException e) { + log.error(e.getMessage(), e); + throw new RuntimeException("Error during verify google recaptcha site", e); + } + validateGoogleResponse(googleResponse, action); + } + + private boolean responseSanityCheck(String response) { + return StringUtils.hasLength(response) && RESPONSE_PATTERN.matcher(response).matches(); + } + + private void validateGoogleResponse(GoogleCaptchaResponse googleResponse, String action) { + if (Objects.isNull(googleResponse)) { + log.error("Google reCaptcha response was empty. ReCaptcha could not be validated."); + throw new InvalidReCaptchaException("reCaptcha was not successfully validated"); + } + + if ("v2".equals(captchaSettings.getCaptchaVersion())) { + if (!googleResponse.isSuccess()) { + log.error("Google reCaptcha v2 returned an unsuccessful response. ReCaptcha was not validated."); + throw new InvalidReCaptchaException("reCaptcha was not successfully validated"); + } + } else { + if (!googleResponse.isSuccess() || !googleResponse.getAction().equals(action) + || googleResponse.getScore() < captchaSettings.getThreshold()) { + log.error("Google reCaptcha v3 returned an unsuccessful response with" + + " action {" + googleResponse.getAction() + "} and score {" + googleResponse.getScore() + "}." 
+ + " ReCaptcha was not validated."); + throw new InvalidReCaptchaException("reCaptcha was not successfully validated"); + } + } + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/eperson/CaptchaSettings.java b/dspace-api/src/main/java/org/dspace/eperson/CaptchaSettings.java new file mode 100644 index 0000000000..e1fe41f9a6 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/CaptchaSettings.java @@ -0,0 +1,62 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson; + +/** + * This model class represent reCaptcha Google credentials + * + * @author Mohamed Eskander (mohamed.eskander at 4science dot it) + */ +public class CaptchaSettings { + + private String site; + private String secret; + private float threshold; + private String siteVerify; + private String captchaVersion; + + public String getSite() { + return site; + } + + public void setSite(String site) { + this.site = site; + } + + public String getSecret() { + return secret; + } + + public void setSecret(String secret) { + this.secret = secret; + } + + public float getThreshold() { + return threshold; + } + + public void setThreshold(float threshold) { + this.threshold = threshold; + } + + public String getSiteVerify() { + return siteVerify; + } + + public void setSiteVerify(String siteVerify) { + this.siteVerify = siteVerify; + } + + public String getCaptchaVersion() { + return captchaVersion; + } + + public void setCaptchaVersion(String captchaVersion) { + this.captchaVersion = captchaVersion; + } +} diff --git a/dspace-api/src/main/java/org/dspace/eperson/EPerson.java b/dspace-api/src/main/java/org/dspace/eperson/EPerson.java index def7697632..da83a1cafd 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/EPerson.java +++ 
b/dspace-api/src/main/java/org/dspace/eperson/EPerson.java @@ -446,4 +446,8 @@ public class EPerson extends DSpaceObject implements DSpaceObjectLegacySupport { return previousActive; } + public boolean hasPasswordSet() { + return StringUtils.isNotBlank(getPassword()); + } + } diff --git a/dspace-api/src/main/java/org/dspace/eperson/EPersonConsumer.java b/dspace-api/src/main/java/org/dspace/eperson/EPersonConsumer.java index 5e81b8ee01..feefe65717 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/EPersonConsumer.java +++ b/dspace-api/src/main/java/org/dspace/eperson/EPersonConsumer.java @@ -7,10 +7,12 @@ */ package org.dspace.eperson; +import java.io.IOException; import java.util.Date; import java.util.UUID; import javax.mail.MessagingException; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.core.Constants; import org.dspace.core.Context; @@ -30,16 +32,17 @@ import org.dspace.services.factory.DSpaceServicesFactory; * Recommended filter: EPerson+Create * * @author Stuart Lewis - * @version $Revision$ */ public class EPersonConsumer implements Consumer { /** * log4j logger */ - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(EPersonConsumer.class); + private static final Logger log + = org.apache.logging.log4j.LogManager.getLogger(EPersonConsumer.class); protected EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); + protected ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); @@ -74,6 +77,7 @@ public class EPersonConsumer implements Consumer { if (et == Event.CREATE) { // Notify of new user registration String notifyRecipient = configurationService.getProperty("registration.notify"); + EPerson eperson = ePersonService.find(context, id); if (notifyRecipient == null) { notifyRecipient = ""; } @@ -81,7 +85,6 @@ public class EPersonConsumer implements Consumer { if 
(!notifyRecipient.equals("")) { try { - EPerson eperson = ePersonService.find(context, id); Email adminEmail = Email .getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(), "registration_notify")); adminEmail.addRecipient(notifyRecipient); @@ -103,6 +106,26 @@ public class EPersonConsumer implements Consumer { "error_emailing_administrator", ""), me); } } + + // If enabled, send a "welcome" message to the new EPerson. + if (configurationService.getBooleanProperty("mail.welcome.enabled", false)) { + String addressee = eperson.getEmail(); + if (StringUtils.isNotBlank(addressee)) { + log.debug("Sending welcome email to {}", addressee); + try { + Email message = Email.getEmail( + I18nUtil.getEmailFilename(context.getCurrentLocale(), "welcome")); + message.addRecipient(addressee); + message.send(); + } catch (IOException | MessagingException ex) { + log.warn("Welcome message not sent to {}: {}", + addressee, ex.getMessage()); + } + } else { + log.warn("Welcome message not sent to EPerson {} because it has no email address.", + eperson.getID().toString()); + } + } } else if (et == Event.DELETE) { // TODO: Implement this if required } diff --git a/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java index 004334e92d..61477995c7 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java @@ -7,6 +7,8 @@ */ package org.dspace.eperson; +import static org.dspace.content.Item.ANY; + import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; @@ -30,6 +32,7 @@ import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.content.DSpaceObjectServiceImpl; import org.dspace.content.Item; import org.dspace.content.MetadataField; +import org.dspace.content.MetadataValue; import org.dspace.content.WorkspaceItem; import 
org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.ItemService; @@ -43,6 +46,8 @@ import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.SubscribeService; import org.dspace.event.Event; +import org.dspace.orcid.service.OrcidTokenService; +import org.dspace.util.UUIDUtils; import org.dspace.versioning.Version; import org.dspace.versioning.VersionHistory; import org.dspace.versioning.dao.VersionDAO; @@ -96,6 +101,8 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl impleme protected VersionDAO versionDAO; @Autowired(required = true) protected ClaimedTaskService claimedTaskService; + @Autowired + protected OrcidTokenService orcidTokenService; protected EPersonServiceImpl() { super(); @@ -379,6 +386,8 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl impleme group.getMembers().remove(ePerson); } + orcidTokenService.deleteByEPerson(context, ePerson); + // Remove any subscriptions subscribeService.deleteByEPerson(context, ePerson); @@ -569,4 +578,18 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl impleme public int countTotal(Context context) throws SQLException { return ePersonDAO.countRows(context); } + + @Override + public EPerson findByProfileItem(Context context, Item profile) throws SQLException { + List owners = itemService.getMetadata(profile, "dspace", "object", "owner", ANY); + if (CollectionUtils.isEmpty(owners)) { + return null; + } + return find(context, UUIDUtils.fromString(owners.get(0).getAuthority())); + } + + @Override + public String getName(EPerson dso) { + return dso.getName(); + } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/GoogleCaptchaResponse.java b/dspace-api/src/main/java/org/dspace/eperson/GoogleCaptchaResponse.java new file mode 100644 index 0000000000..30817f243c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/GoogleCaptchaResponse.java @@ -0,0 +1,142 
@@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson; + +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; + +/** + * This model class represent the response for validation of reCaptcha token + * + * @author Mohamed Eskander (mohamed.eskander at 4science dot it) + */ +@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonIgnoreProperties(ignoreUnknown = true) +@JsonPropertyOrder({ + "success", + "score", + "action", + "challenge_ts", + "hostname", + "error-codes" +}) +public class GoogleCaptchaResponse { + + @JsonProperty("success") + private boolean success; + + @JsonProperty("score") + private float score; + + @JsonProperty("action") + private String action; + + @JsonProperty("challenge_ts") + private String challengeTs; + + @JsonProperty("hostname") + private String hostname; + + @JsonProperty("error-codes") + private ErrorCode[] errorCodes; + + public boolean isSuccess() { + return success; + } + + public float getScore() { + return score; + } + + public void setScore(float score) { + this.score = score; + } + + public String getAction() { + return action; + } + + public void setAction(String action) { + this.action = action; + } + + public void setSuccess(boolean success) { + this.success = success; + } + + public String getChallengeTs() { + return challengeTs; + } + + public void setChallengeTs(String challengeTs) { + this.challengeTs = challengeTs; + } + + public String getHostname() { + return hostname; + } + + public void 
setHostname(String hostname) { + this.hostname = hostname; + } + + public ErrorCode[] getErrorCodes() { + return errorCodes; + } + + public void setErrorCodes(ErrorCode[] errorCodes) { + this.errorCodes = errorCodes; + } + + @JsonIgnore + public boolean hasClientError() { + ErrorCode[] errors = getErrorCodes(); + if (errors == null) { + return false; + } + for (ErrorCode error : errors) { + switch (error) { + case InvalidResponse: + case MissingResponse: + return true; + default: break; + } + } + return false; + } + + static enum ErrorCode { + + MissingSecret, + InvalidSecret, + MissingResponse, + InvalidResponse; + + private static Map errorsMap = new HashMap<>(4); + + static { + errorsMap.put("missing-input-secret", MissingSecret); + errorsMap.put("invalid-input-secret", InvalidSecret); + errorsMap.put("missing-input-response", MissingResponse); + errorsMap.put("invalid-input-response", InvalidResponse); + } + + @JsonCreator + public static ErrorCode forValue(String value) { + return errorsMap.get(value.toLowerCase()); + } + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/eperson/Group.java b/dspace-api/src/main/java/org/dspace/eperson/Group.java index 09b5ce189b..b2d3964895 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/Group.java +++ b/dspace-api/src/main/java/org/dspace/eperson/Group.java @@ -201,7 +201,7 @@ public class Group extends DSpaceObject implements DSpaceObjectLegacySupport { void setName(String name) throws SQLException { if (!StringUtils.equals(this.name, name) && !isPermanent()) { this.name = name; - groupsChanged = true; + setMetadataModified(); } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java index be81cd9bd8..2b23ecfeef 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java @@ -22,6 +22,8 @@ import 
java.util.UUID; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Pair; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeConfiguration; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; @@ -52,8 +54,6 @@ import org.dspace.xmlworkflow.storedcomponents.PoolTask; import org.dspace.xmlworkflow.storedcomponents.service.ClaimedTaskService; import org.dspace.xmlworkflow.storedcomponents.service.CollectionRoleService; import org.dspace.xmlworkflow.storedcomponents.service.PoolTaskService; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; /** @@ -64,7 +64,7 @@ import org.springframework.beans.factory.annotation.Autowired; * @author kevinvandevelde at atmire.com */ public class GroupServiceImpl extends DSpaceObjectServiceImpl implements GroupService { - private static final Logger log = LoggerFactory.getLogger(GroupServiceImpl.class); + private static final Logger log = LogManager.getLogger(); @Autowired(required = true) protected GroupDAO groupDAO; @@ -473,7 +473,7 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl implements @Override public void delete(Context context, Group group) throws SQLException { if (group.isPermanent()) { - log.error("Attempt to delete permanent Group $", group.getName()); + log.error("Attempt to delete permanent Group {}", group::getName); throw new SQLException("Attempt to delete a permanent Group"); } @@ -715,7 +715,7 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl implements // if the group is used for one or more roles on a single collection, // admins can eventually manage it List collectionRoles = collectionRoleService.findByGroup(context, group); - if (collectionRoles != null && collectionRoles.size() > 0) { + if (collectionRoles != null 
&& !collectionRoles.isEmpty()) { Set colls = new HashSet<>(); for (CollectionRole cr : collectionRoles) { colls.add(cr.getCollection()); @@ -829,4 +829,9 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl implements final MetadataField metadataField) throws SQLException { return groupDAO.findByMetadataField(context, searchValue, metadataField); } + + @Override + public String getName(Group dso) { + return dso.getName(); + } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/InvalidReCaptchaException.java b/dspace-api/src/main/java/org/dspace/eperson/InvalidReCaptchaException.java new file mode 100644 index 0000000000..3d6584057f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/InvalidReCaptchaException.java @@ -0,0 +1,28 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson; + +/** + * This class provides an exception to be used when trying to register a new EPerson + * and Captcha validations failed. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class InvalidReCaptchaException extends RuntimeException { + + private static final long serialVersionUID = -5328794674744121744L; + + public InvalidReCaptchaException(String message) { + super(message); + } + + public InvalidReCaptchaException(String message, Exception cause) { + super(message, cause); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java b/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java index ab37aa4047..2cc77129f0 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java @@ -45,7 +45,7 @@ public interface GroupDAO extends DSpaceObjectDAO, DSpaceObjectLegacySupp * Find all groups ordered by the specified metadata fields ascending * * @param context The DSpace context - * @param sortMetadataFields The metadata fields to sort on + * @param metadataSortFields The metadata fields to sort on * @param pageSize how many results return * @param offset the position of the first result to return * @return A list of all groups, ordered by metadata fields diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/AccountService.java b/dspace-api/src/main/java/org/dspace/eperson/service/AccountService.java index 45fa6d26b1..c8ecb0cc67 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/service/AccountService.java +++ b/dspace-api/src/main/java/org/dspace/eperson/service/AccountService.java @@ -46,11 +46,4 @@ public interface AccountService { public void deleteToken(Context context, String token) throws SQLException; - - /** - * This method verifies that a certain String adheres to the password rules for DSpace - * @param password The String to be checked - * @return A boolean indicating whether or not the given String adheres to the password rules - */ - public boolean verifyPasswordStructure(String password); } diff 
--git a/dspace-api/src/main/java/org/dspace/eperson/service/CaptchaService.java b/dspace-api/src/main/java/org/dspace/eperson/service/CaptchaService.java new file mode 100644 index 0000000000..da417facc6 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/service/CaptchaService.java @@ -0,0 +1,30 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson.service; + +import org.dspace.eperson.InvalidReCaptchaException; + +/** + * This service for validate the reCaptcha token + * + * @author Mohamed Eskander (mohamed.eskander at 4science dot it) + */ +public interface CaptchaService { + + public String REGISTER_ACTION = "register_email"; + + /** + * validate the entered reCaptcha token + * + * @param response reCaptcha token to be validated + * @param action action of reCaptcha + * @throws InvalidReCaptchaException if reCaptcha was not successfully validated + */ + public void processResponse(String response, String action) throws InvalidReCaptchaException; + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java b/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java index 6d2dd67d76..c5c9801c16 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java +++ b/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java @@ -15,6 +15,7 @@ import java.util.List; import java.util.Set; import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; import org.dspace.content.MetadataFieldName; import org.dspace.content.service.DSpaceObjectLegacySupportService; import org.dspace.content.service.DSpaceObjectService; @@ -263,4 +264,16 @@ public interface EPersonService extends DSpaceObjectService, DSpaceObje * @throws SQLException An 
exception that provides information on a database access error or other errors. */ int countTotal(Context context) throws SQLException; + + /** + * Find the EPerson related to the given profile item. If the given item is not + * a profile item, null is returned. + * + * @param context The relevant DSpace Context. + * @param profile the profile item to search for + * @return the EPerson, if any + * @throws SQLException An exception that provides information on a database + * access error or other errors. + */ + EPerson findByProfileItem(Context context, Item profile) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/external/OpenAIRERestConnector.java b/dspace-api/src/main/java/org/dspace/external/OpenAIRERestConnector.java index 5e45d6324d..b0aa4aba13 100644 --- a/dspace-api/src/main/java/org/dspace/external/OpenAIRERestConnector.java +++ b/dspace-api/src/main/java/org/dspace/external/OpenAIRERestConnector.java @@ -335,7 +335,7 @@ public class OpenAIRERestConnector { /** * tokenUsage true to enable the usage of an access token * - * @param tokenUsage + * @param tokenEnabled true/false */ @Autowired(required = false) public void setTokenEnabled(boolean tokenEnabled) { diff --git a/dspace-api/src/main/java/org/dspace/external/provider/impl/LiveImportDataProvider.java b/dspace-api/src/main/java/org/dspace/external/provider/impl/LiveImportDataProvider.java index 962183fa6f..2e934462c9 100644 --- a/dspace-api/src/main/java/org/dspace/external/provider/impl/LiveImportDataProvider.java +++ b/dspace-api/src/main/java/org/dspace/external/provider/impl/LiveImportDataProvider.java @@ -57,7 +57,7 @@ public class LiveImportDataProvider extends AbstractExternalDataProvider { /** * This method set the MetadataSource for the ExternalDataProvider - * @param metadataSource {@link org.dspace.importer.external.service.components.MetadataSource} implementation used to process the input data + * @param querySource Source {@link 
org.dspace.importer.external.service.components.QuerySource} implementation used to process the input data */ public void setMetadataSource(QuerySource querySource) { this.querySource = querySource; diff --git a/dspace-api/src/main/java/org/dspace/external/provider/impl/OpenAIREFundingDataProvider.java b/dspace-api/src/main/java/org/dspace/external/provider/impl/OpenAIREFundingDataProvider.java index 3dcd7d16a6..8ca5b7c0ea 100644 --- a/dspace-api/src/main/java/org/dspace/external/provider/impl/OpenAIREFundingDataProvider.java +++ b/dspace-api/src/main/java/org/dspace/external/provider/impl/OpenAIREFundingDataProvider.java @@ -15,6 +15,7 @@ import java.util.ArrayList; import java.util.Base64; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -33,6 +34,7 @@ import org.dspace.content.dto.MetadataValueDTO; import org.dspace.external.OpenAIRERestConnector; import org.dspace.external.model.ExternalDataObject; import org.dspace.external.provider.AbstractExternalDataProvider; +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; import org.springframework.beans.factory.annotation.Autowired; /** @@ -40,13 +42,9 @@ import org.springframework.beans.factory.annotation.Autowired; * will deal with the OpenAIRE External Data lookup * * @author paulo-graca - * */ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider { - /** - * log4j logger - */ private static Logger log = org.apache.logging.log4j.LogManager.getLogger(OpenAIREFundingDataProvider.class); /** @@ -54,6 +52,16 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider { */ protected static final String PREFIX = "info:eu-repo/grantAgreement"; + private static final String TITLE = "dcTitle"; + private static final String SUBJECT = "dcSubject"; + private static final String AWARD_URI = "awardURI"; + private static final String 
FUNDER_NAME = "funderName"; + private static final String SPATIAL = "coverageSpatial"; + private static final String AWARD_NUMBER = "awardNumber"; + private static final String FUNDER_ID = "funderIdentifier"; + private static final String FUNDING_STREAM = "fundingStream"; + private static final String TITLE_ALTERNATIVE = "titleAlternative"; + /** * rows default limit */ @@ -69,11 +77,9 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider { */ protected OpenAIRERestConnector connector; - /** - * required method - */ - public void init() throws IOException { - } + protected Map metadataFields; + + public void init() throws IOException {} @Override public String getSourceIdentifier() { @@ -266,14 +272,22 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider { } } + public Map getMetadataFields() { + return metadataFields; + } + + public void setMetadataFields(Map metadataFields) { + this.metadataFields = metadataFields; + } + /** * OpenAIRE Funding External Data Builder Class * * @author pgraca - * */ - public static class ExternalDataObjectBuilder { - ExternalDataObject externalDataObject; + public class ExternalDataObjectBuilder { + + private ExternalDataObject externalDataObject; public ExternalDataObjectBuilder(Project project) { String funderIdPrefix = "urn:openaire:"; @@ -283,46 +297,42 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider { for (FundingTreeType fundingTree : projectHelper.getFundingTreeTypes()) { FunderType funder = fundingTree.getFunder(); // Funder name - this.addFunderName(funder.getName()); + this.addMetadata(metadataFields.get(FUNDER_NAME), funder.getName()); // Funder Id - convert it to an urn - this.addFunderID(funderIdPrefix + funder.getId()); + this.addMetadata(metadataFields.get(FUNDER_ID), funderIdPrefix + funder.getId()); // Jurisdiction - this.addFunderJuristiction(funder.getJurisdiction()); + this.addMetadata(metadataFields.get(SPATIAL), 
funder.getJurisdiction()); FundingHelper fundingHelper = new FundingHelper( - fundingTree.getFundingLevel2OrFundingLevel1OrFundingLevel0()); + fundingTree.getFundingLevel2OrFundingLevel1OrFundingLevel0()); // Funding description for (FundingType funding : fundingHelper.getFirstAvailableFunding()) { - this.addFundingStream(funding.getDescription()); + this.addMetadata(metadataFields.get(FUNDING_STREAM), funding.getDescription()); } } // Title for (String title : projectHelper.getTitles()) { - this.addAwardTitle(title); + this.addMetadata(metadataFields.get(TITLE), title); this.setDisplayValue(title); this.setValue(title); } - // Code for (String code : projectHelper.getCodes()) { - this.addAwardNumber(code); + this.addMetadata(metadataFields.get(AWARD_NUMBER), code); } - // Website url for (String url : projectHelper.getWebsiteUrls()) { - this.addAwardURI(url); + this.addMetadata(metadataFields.get(AWARD_URI), url); } - // Acronyms for (String acronym : projectHelper.getAcronyms()) { - this.addFundingItemAcronym(acronym); + this.addMetadata(metadataFields.get(TITLE_ALTERNATIVE), acronym); } - // Keywords for (String keyword : projectHelper.getKeywords()) { - this.addSubject(keyword); + this.addMetadata(metadataFields.get(SUBJECT), keyword); } } @@ -366,7 +376,6 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider { * @return ExternalDataObjectBuilder */ public ExternalDataObjectBuilder setId(String id) { - // we use base64 encoding in order to use slashes / and other // characters that must be escaped for the <:entry-id> String base64Id = Base64.getEncoder().encodeToString(id.getBytes()); @@ -374,128 +383,10 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider { return this; } - /** - * Add metadata dc.identifier - * - * @param metadata identifier - * @return ExternalDataObjectBuilder - */ - public ExternalDataObjectBuilder addIdentifier(String identifier) { - this.externalDataObject.addMetadata(new 
MetadataValueDTO("dc", "identifier", null, null, identifier)); - return this; - } - - /** - * Add metadata project.funder.name - * - * @param metadata funderName - * @return ExternalDataObjectBuilder - */ - public ExternalDataObjectBuilder addFunderName(String funderName) { - this.externalDataObject.addMetadata(new MetadataValueDTO("project", "funder", "name", null, funderName)); - return this; - } - - /** - * Add metadata project.funder.identifier - * - * @param metadata funderId - * @return ExternalDataObjectBuilder - */ - public ExternalDataObjectBuilder addFunderID(String funderID) { - this.externalDataObject - .addMetadata(new MetadataValueDTO("project", "funder", "identifier", null, funderID)); - return this; - } - - /** - * Add metadata dc.title - * - * @param metadata awardTitle - * @return ExternalDataObjectBuilder - */ - public ExternalDataObjectBuilder addAwardTitle(String awardTitle) { - this.externalDataObject.addMetadata(new MetadataValueDTO("dc", "title", null, null, awardTitle)); - return this; - } - - /** - * Add metadata oaire.fundingStream - * - * @param metadata fundingStream - * @return ExternalDataObjectBuilder - */ - public ExternalDataObjectBuilder addFundingStream(String fundingStream) { - this.externalDataObject - .addMetadata(new MetadataValueDTO("oaire", "fundingStream", null, null, fundingStream)); - return this; - } - - /** - * Add metadata oaire.awardNumber - * - * @param metadata awardNumber - * @return ExternalDataObjectBuilder - */ - public ExternalDataObjectBuilder addAwardNumber(String awardNumber) { - this.externalDataObject.addMetadata(new MetadataValueDTO("oaire", "awardNumber", null, null, awardNumber)); - return this; - } - - /** - * Add metadata oaire.awardURI - * - * @param metadata websiteUrl - * @return ExternalDataObjectBuilder - */ - public ExternalDataObjectBuilder addAwardURI(String websiteUrl) { - this.externalDataObject.addMetadata(new MetadataValueDTO("oaire", "awardURI", null, null, websiteUrl)); - return this; - 
} - - /** - * Add metadata dc.title.alternative - * - * @param metadata fundingItemAcronym - * @return ExternalDataObjectBuilder - */ - public ExternalDataObjectBuilder addFundingItemAcronym(String fundingItemAcronym) { - this.externalDataObject - .addMetadata(new MetadataValueDTO("dc", "title", "alternative", null, fundingItemAcronym)); - return this; - } - - /** - * Add metadata dc.coverage.spatial - * - * @param metadata funderJuristiction - * @return ExternalDataObjectBuilder - */ - public ExternalDataObjectBuilder addFunderJuristiction(String funderJuristiction) { - this.externalDataObject - .addMetadata(new MetadataValueDTO("dc", "coverage", "spatial", null, funderJuristiction)); - return this; - } - - /** - * Add metadata dc.description - * - * @param metadata description - * @return ExternalDataObjectBuilder - */ - public ExternalDataObjectBuilder addDescription(String description) { - this.externalDataObject.addMetadata(new MetadataValueDTO("dc", "description", null, null, description)); - return this; - } - - /** - * Add metadata dc.subject - * - * @param metadata subject - * @return ExternalDataObjectBuilder - */ - public ExternalDataObjectBuilder addSubject(String subject) { - this.externalDataObject.addMetadata(new MetadataValueDTO("dc", "subject", null, null, subject)); + public ExternalDataObjectBuilder addMetadata(MetadataFieldConfig metadataField, String value) { + this.externalDataObject.addMetadata(new MetadataValueDTO(metadataField.getSchema(), + metadataField.getElement(), + metadataField.getQualifier(), null, value)); return this; } @@ -508,4 +399,5 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider { return this.externalDataObject; } } -} + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidPublicationDataProvider.java b/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidPublicationDataProvider.java new file mode 100644 index 
0000000000..4fdf15a8a3 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidPublicationDataProvider.java @@ -0,0 +1,547 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.external.provider.impl; + +import static java.util.Collections.emptyList; +import static java.util.Comparator.comparing; +import static java.util.Comparator.reverseOrder; +import static java.util.Optional.ofNullable; +import static org.apache.commons.collections4.ListUtils.partition; +import static org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.orcid.jaxb.model.common.CitationType.FORMATTED_UNSPECIFIED; + +import java.io.File; +import java.io.FileOutputStream; +import java.nio.charset.Charset; +import java.util.List; +import java.util.Optional; +import java.util.function.Supplier; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.math.NumberUtils; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.dto.MetadataValueDTO; +import org.dspace.core.Context; +import org.dspace.external.model.ExternalDataObject; +import org.dspace.external.provider.AbstractExternalDataProvider; +import org.dspace.external.provider.ExternalDataProvider; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.dspace.importer.external.service.ImportService; +import org.dspace.orcid.OrcidToken; +import org.dspace.orcid.client.OrcidClient; +import org.dspace.orcid.client.OrcidConfiguration; +import org.dspace.orcid.model.OrcidTokenResponseDTO; +import 
org.dspace.orcid.model.OrcidWorkFieldMapping; +import org.dspace.orcid.service.OrcidSynchronizationService; +import org.dspace.orcid.service.OrcidTokenService; +import org.dspace.web.ContextUtil; +import org.orcid.jaxb.model.common.ContributorRole; +import org.orcid.jaxb.model.common.WorkType; +import org.orcid.jaxb.model.v3.release.common.Contributor; +import org.orcid.jaxb.model.v3.release.common.ContributorAttributes; +import org.orcid.jaxb.model.v3.release.common.PublicationDate; +import org.orcid.jaxb.model.v3.release.common.Subtitle; +import org.orcid.jaxb.model.v3.release.common.Title; +import org.orcid.jaxb.model.v3.release.record.Citation; +import org.orcid.jaxb.model.v3.release.record.ExternalIDs; +import org.orcid.jaxb.model.v3.release.record.SourceAware; +import org.orcid.jaxb.model.v3.release.record.Work; +import org.orcid.jaxb.model.v3.release.record.WorkBulk; +import org.orcid.jaxb.model.v3.release.record.WorkContributors; +import org.orcid.jaxb.model.v3.release.record.WorkTitle; +import org.orcid.jaxb.model.v3.release.record.summary.WorkGroup; +import org.orcid.jaxb.model.v3.release.record.summary.WorkSummary; +import org.orcid.jaxb.model.v3.release.record.summary.Works; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link ExternalDataProvider} that search for all the works + * of the profile with the given orcid id that hava a source other than DSpace. 
+ * The id of the external data objects returned by the methods of this class is + * the concatenation of the orcid id and the put code associated with the + * publication, separated by :: (example 0000-0000-0123-4567::123456) + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidPublicationDataProvider extends AbstractExternalDataProvider { + + private final static Logger LOGGER = LoggerFactory.getLogger(OrcidPublicationDataProvider.class); + + /** + * Examples of valid ORCID IDs: + *

      + *
    • 0000-0002-1825-0097
    • + *
    • 0000-0001-5109-3700
    • + *
    • 0000-0002-1694-233X
    • + *
    + */ + private final static Pattern ORCID_ID_PATTERN = Pattern.compile("(\\d{4}-){3}\\d{3}(\\d|X)"); + + private final static int MAX_PUT_CODES_SIZE = 100; + + @Autowired + private OrcidClient orcidClient; + + @Autowired + private OrcidConfiguration orcidConfiguration; + + @Autowired + private OrcidSynchronizationService orcidSynchronizationService; + + @Autowired + private ImportService importService; + + @Autowired + private OrcidTokenService orcidTokenService; + + private OrcidWorkFieldMapping fieldMapping; + + private String sourceIdentifier; + + private String readPublicAccessToken; + + @Override + public Optional getExternalDataObject(String id) { + + if (isInvalidIdentifier(id)) { + throw new IllegalArgumentException("Invalid identifier '" + id + "', expected ::"); + } + + String[] idSections = id.split("::"); + String orcid = idSections[0]; + String putCode = idSections[1]; + + validateOrcidId(orcid); + + return getWork(orcid, putCode) + .filter(work -> hasDifferentSourceClientId(work)) + .filter(work -> work.getPutCode() != null) + .map(work -> convertToExternalDataObject(orcid, work)); + } + + @Override + public List searchExternalDataObjects(String orcid, int start, int limit) { + + validateOrcidId(orcid); + + return findWorks(orcid, start, limit).stream() + .map(work -> convertToExternalDataObject(orcid, work)) + .collect(Collectors.toList()); + } + + private boolean isInvalidIdentifier(String id) { + return StringUtils.isBlank(id) || id.split("::").length != 2; + } + + private void validateOrcidId(String orcid) { + if (!ORCID_ID_PATTERN.matcher(orcid).matches()) { + throw new IllegalArgumentException("The given ORCID ID is not valid: " + orcid); + } + } + + /** + * Returns all the works related to the given ORCID in the range from start and + * limit. 
+ * + * @param orcid the ORCID ID of the author to search for works + * @param start the start index + * @param limit the limit index + * @return the list of the works + */ + private List findWorks(String orcid, int start, int limit) { + List workSummaries = findWorkSummaries(orcid, start, limit); + return findWorks(orcid, workSummaries); + } + + /** + * Returns all the works summaries related to the given ORCID in the range from + * start and limit. + * + * @param orcid the ORCID ID of the author to search for works summaries + * @param start the start index + * @param limit the limit index + * @return the list of the works summaries + */ + private List findWorkSummaries(String orcid, int start, int limit) { + return getWorks(orcid).getWorkGroup().stream() + .filter(workGroup -> allWorkSummariesHaveDifferentSourceClientId(workGroup)) + .map(workGroup -> getPreferredWorkSummary(workGroup)) + .flatMap(Optional::stream) + .skip(start) + .limit(limit > 0 ? limit : Long.MAX_VALUE) + .collect(Collectors.toList()); + } + + /** + * Returns all the works related to the given ORCID ID and work summaries (a + * work has more details than a work summary). + * + * @param orcid the ORCID id of the author to search for works + * @param workSummaries the work summaries used to search the related works + * @return the list of the works + */ + private List findWorks(String orcid, List workSummaries) { + + List workPutCodes = getPutCodes(workSummaries); + + if (CollectionUtils.isEmpty(workPutCodes)) { + return emptyList(); + } + + if (workPutCodes.size() == 1) { + return getWork(orcid, workPutCodes.get(0)).stream().collect(Collectors.toList()); + } + + return partition(workPutCodes, MAX_PUT_CODES_SIZE).stream() + .map(putCodes -> getWorkBulk(orcid, putCodes)) + .flatMap(workBulk -> getWorks(workBulk).stream()) + .collect(Collectors.toList()); + } + + /** + * Search a work by ORCID id and putcode, using API or PUBLIC urls based on + * whether the ORCID API keys are configured or not. 
+ * + * @param orcid the ORCID ID + * @param putCode the work's identifier on ORCID + * @return the work, if any + */ + private Optional getWork(String orcid, String putCode) { + if (orcidConfiguration.isApiConfigured()) { + String accessToken = getAccessToken(orcid); + return orcidClient.getObject(accessToken, orcid, putCode, Work.class); + } else { + return orcidClient.getObject(orcid, putCode, Work.class); + } + } + + /** + * Returns all the works related to the given ORCID. + * + * @param orcid the ORCID ID of the author to search for works + * @return the list of the works + */ + private Works getWorks(String orcid) { + if (orcidConfiguration.isApiConfigured()) { + String accessToken = getAccessToken(orcid); + return orcidClient.getWorks(accessToken, orcid); + } else { + return orcidClient.getWorks(orcid); + } + } + + /** + * Returns all the works related to the given ORCID by the given putCodes. + * + * @param orcid the ORCID ID of the author to search for works + * @param putCodes the work's put codes to search + * @return the list of the works + */ + private WorkBulk getWorkBulk(String orcid, List putCodes) { + if (orcidConfiguration.isApiConfigured()) { + String accessToken = getAccessToken(orcid); + return orcidClient.getWorkBulk(accessToken, orcid, putCodes); + } else { + return orcidClient.getWorkBulk(orcid, putCodes); + } + } + + private String getAccessToken(String orcid) { + List items = orcidSynchronizationService.findProfilesByOrcid(new Context(), orcid); + return Optional.ofNullable(items.isEmpty() ? 
null : items.get(0)) + .flatMap(item -> getAccessToken(item)) + .orElseGet(() -> getReadPublicAccessToken()); + } + + private Optional getAccessToken(Item item) { + return ofNullable(orcidTokenService.findByProfileItem(getContext(), item)) + .map(OrcidToken::getAccessToken); + } + + private String getReadPublicAccessToken() { + if (readPublicAccessToken != null) { + return readPublicAccessToken; + } + + OrcidTokenResponseDTO accessTokenResponse = orcidClient.getReadPublicAccessToken(); + readPublicAccessToken = accessTokenResponse.getAccessToken(); + + return readPublicAccessToken; + } + + private List getWorks(WorkBulk workBulk) { + return workBulk.getBulk().stream() + .filter(bulkElement -> (bulkElement instanceof Work)) + .map(bulkElement -> ((Work) bulkElement)) + .collect(Collectors.toList()); + + } + + private List getPutCodes(List workSummaries) { + return workSummaries.stream() + .map(WorkSummary::getPutCode) + .map(String::valueOf) + .collect(Collectors.toList()); + } + + private Optional getPreferredWorkSummary(WorkGroup workGroup) { + return workGroup.getWorkSummary().stream() + .filter(work -> work.getPutCode() != null) + .filter(work -> NumberUtils.isCreatable(work.getDisplayIndex())) + .sorted(comparing(work -> Integer.valueOf(work.getDisplayIndex()), reverseOrder())) + .findFirst(); + } + + private ExternalDataObject convertToExternalDataObject(String orcid, Work work) { + ExternalDataObject externalDataObject = new ExternalDataObject(sourceIdentifier); + externalDataObject.setId(orcid + "::" + work.getPutCode().toString()); + + String title = getWorkTitle(work); + externalDataObject.setDisplayValue(title); + externalDataObject.setValue(title); + + addMetadataValue(externalDataObject, fieldMapping.getTitleField(), () -> title); + addMetadataValue(externalDataObject, fieldMapping.getTypeField(), () -> getWorkType(work)); + addMetadataValue(externalDataObject, fieldMapping.getPublicationDateField(), () -> getPublicationDate(work)); + 
addMetadataValue(externalDataObject, fieldMapping.getJournalTitleField(), () -> getJournalTitle(work)); + addMetadataValue(externalDataObject, fieldMapping.getSubTitleField(), () -> getSubTitleField(work)); + addMetadataValue(externalDataObject, fieldMapping.getShortDescriptionField(), () -> getDescription(work)); + addMetadataValue(externalDataObject, fieldMapping.getLanguageField(), () -> getLanguage(work)); + + for (String contributorField : fieldMapping.getContributorFields().keySet()) { + ContributorRole role = fieldMapping.getContributorFields().get(contributorField); + addMetadataValues(externalDataObject, contributorField, () -> getContributors(work, role)); + } + + for (String externalIdField : fieldMapping.getExternalIdentifierFields().keySet()) { + String type = fieldMapping.getExternalIdentifierFields().get(externalIdField); + addMetadataValues(externalDataObject, externalIdField, () -> getExternalIds(work, type)); + } + + try { + addMetadataValuesFromCitation(externalDataObject, work.getWorkCitation()); + } catch (Exception e) { + LOGGER.error("An error occurs reading the following citation: " + work.getWorkCitation().getCitation(), e); + } + + return externalDataObject; + } + + private boolean allWorkSummariesHaveDifferentSourceClientId(WorkGroup workGroup) { + return workGroup.getWorkSummary().stream().allMatch(this::hasDifferentSourceClientId); + } + + @SuppressWarnings("deprecation") + private boolean hasDifferentSourceClientId(SourceAware sourceAware) { + return Optional.ofNullable(sourceAware.getSource()) + .map(source -> source.getSourceClientId()) + .map(sourceClientId -> sourceClientId.getPath()) + .map(clientId -> !StringUtils.equals(orcidConfiguration.getClientId(), clientId)) + .orElse(true); + } + + private void addMetadataValues(ExternalDataObject externalData, String metadata, Supplier> values) { + + if (StringUtils.isBlank(metadata)) { + return; + } + + MetadataFieldName field = new MetadataFieldName(metadata); + for (String value : 
values.get()) { + externalData.addMetadata(new MetadataValueDTO(field.schema, field.element, field.qualifier, null, value)); + } + } + + private void addMetadataValue(ExternalDataObject externalData, String metadata, Supplier valueSupplier) { + addMetadataValues(externalData, metadata, () -> { + String value = valueSupplier.get(); + return isNotBlank(value) ? List.of(value) : emptyList(); + }); + } + + private String getWorkTitle(Work work) { + WorkTitle workTitle = work.getWorkTitle(); + if (workTitle == null) { + return null; + } + Title title = workTitle.getTitle(); + return title != null ? title.getContent() : null; + } + + private String getWorkType(Work work) { + WorkType workType = work.getWorkType(); + return workType != null ? fieldMapping.convertType(workType.value()) : null; + } + + private String getPublicationDate(Work work) { + PublicationDate publicationDate = work.getPublicationDate(); + if (publicationDate == null) { + return null; + } + + StringBuilder builder = new StringBuilder(publicationDate.getYear().getValue()); + if (publicationDate.getMonth() != null) { + builder.append("-"); + builder.append(publicationDate.getMonth().getValue()); + } + + if (publicationDate.getDay() != null) { + builder.append("-"); + builder.append(publicationDate.getDay().getValue()); + } + + return builder.toString(); + } + + private String getJournalTitle(Work work) { + Title journalTitle = work.getJournalTitle(); + return journalTitle != null ? journalTitle.getContent() : null; + } + + private String getSubTitleField(Work work) { + WorkTitle workTitle = work.getWorkTitle(); + if (workTitle == null) { + return null; + } + Subtitle subTitle = workTitle.getSubtitle(); + return subTitle != null ? subTitle.getContent() : null; + } + + private String getDescription(Work work) { + return work.getShortDescription(); + } + + private String getLanguage(Work work) { + return work.getLanguageCode() != null ? 
fieldMapping.convertLanguage(work.getLanguageCode()) : null; + } + + private List getContributors(Work work, ContributorRole role) { + WorkContributors workContributors = work.getWorkContributors(); + if (workContributors == null) { + return emptyList(); + } + + return workContributors.getContributor().stream() + .filter(contributor -> hasRole(contributor, role)) + .map(contributor -> getContributorName(contributor)) + .flatMap(Optional::stream) + .collect(Collectors.toList()); + } + + private void addMetadataValuesFromCitation(ExternalDataObject externalDataObject, Citation citation) + throws Exception { + + if (citation == null || citation.getWorkCitationType() == FORMATTED_UNSPECIFIED) { + return; + } + + getImportRecord(citation).ifPresent(importRecord -> enrichExternalDataObject(externalDataObject, importRecord)); + + } + + private Optional getImportRecord(Citation citation) throws Exception { + File citationFile = File.createTempFile("temp", "." + citation.getWorkCitationType().value()); + try (FileOutputStream outputStream = new FileOutputStream(citationFile)) { + IOUtils.write(citation.getCitation(), new FileOutputStream(citationFile), Charset.defaultCharset()); + return Optional.ofNullable(importService.getRecord(citationFile, citationFile.getName())); + } finally { + citationFile.delete(); + } + } + + private void enrichExternalDataObject(ExternalDataObject externalDataObject, ImportRecord importRecord) { + importRecord.getValueList().stream() + .filter(metadata -> doesNotContains(externalDataObject, metadata)) + .forEach(metadata -> addMetadata(externalDataObject, metadata)); + } + + private void addMetadata(ExternalDataObject externalDataObject, MetadatumDTO metadata) { + externalDataObject.addMetadata(new MetadataValueDTO(metadata.getSchema(), metadata.getElement(), + metadata.getQualifier(), null, metadata.getValue())); + } + + private boolean doesNotContains(ExternalDataObject externalDataObject, MetadatumDTO metadata) { + return 
externalDataObject.getMetadata().stream() + .filter(metadataValue -> StringUtils.equals(metadataValue.getSchema(), metadata.getSchema())) + .filter(metadataValue -> StringUtils.equals(metadataValue.getElement(), metadata.getElement())) + .filter(metadataValue -> StringUtils.equals(metadataValue.getQualifier(), metadata.getQualifier())) + .findAny().isEmpty(); + } + + private boolean hasRole(Contributor contributor, ContributorRole role) { + ContributorAttributes attributes = contributor.getContributorAttributes(); + return attributes != null ? role.equals(attributes.getContributorRole()) : false; + } + + private Optional getContributorName(Contributor contributor) { + return Optional.ofNullable(contributor.getCreditName()) + .map(creditName -> creditName.getContent()); + } + + private List getExternalIds(Work work, String type) { + ExternalIDs externalIdentifiers = work.getExternalIdentifiers(); + if (externalIdentifiers == null) { + return emptyList(); + } + + return externalIdentifiers.getExternalIdentifier().stream() + .filter(externalId -> type.equals(externalId.getType())) + .map(externalId -> externalId.getValue()) + .collect(Collectors.toList()); + } + + private Context getContext() { + Context context = ContextUtil.obtainCurrentRequestContext(); + return context != null ? 
context : new Context(); + } + + @Override + public boolean supports(String source) { + return StringUtils.equals(sourceIdentifier, source); + } + + @Override + public int getNumberOfResults(String orcid) { + return findWorkSummaries(orcid, 0, -1).size(); + } + + public void setSourceIdentifier(String sourceIdentifier) { + this.sourceIdentifier = sourceIdentifier; + } + + @Override + public String getSourceIdentifier() { + return sourceIdentifier; + } + + public void setFieldMapping(OrcidWorkFieldMapping fieldMapping) { + this.fieldMapping = fieldMapping; + } + + public void setReadPublicAccessToken(String readPublicAccessToken) { + this.readPublicAccessToken = readPublicAccessToken; + } + + public OrcidClient getOrcidClient() { + return orcidClient; + } + + public void setOrcidClient(OrcidClient orcidClient) { + this.orcidClient = orcidClient; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidV3AuthorDataProvider.java b/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidV3AuthorDataProvider.java index 0653ee758d..125da8f7c6 100644 --- a/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidV3AuthorDataProvider.java +++ b/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidV3AuthorDataProvider.java @@ -140,7 +140,7 @@ public class OrcidV3AuthorDataProvider extends AbstractExternalDataProvider { new MetadataValueDTO("person", "identifier", "orcid", null, person.getName().getPath())); externalDataObject .addMetadata(new MetadataValueDTO("dc", "identifier", "uri", null, - orcidUrl + person.getName().getPath())); + orcidUrl + "/" + person.getName().getPath())); if (!StringUtils.isBlank(lastName) && !StringUtils.isBlank(firstName)) { externalDataObject.setDisplayValue(lastName + ", " + firstName); externalDataObject.setValue(lastName + ", " + firstName); diff --git a/dspace-api/src/main/java/org/dspace/google/GoogleAnalyticsEvent.java 
b/dspace-api/src/main/java/org/dspace/google/GoogleAnalyticsEvent.java index 9e9751337c..50da0e5283 100644 --- a/dspace-api/src/main/java/org/dspace/google/GoogleAnalyticsEvent.java +++ b/dspace-api/src/main/java/org/dspace/google/GoogleAnalyticsEvent.java @@ -7,134 +7,93 @@ */ package org.dspace.google; +import java.util.Objects; + +import org.springframework.util.Assert; + /** * This is a dataholder class for an individual event to be sent to Google Analaytics * * @author April Herron */ -public class GoogleAnalyticsEvent { +public final class GoogleAnalyticsEvent { - private String cid; - private String uip; - private String ua; - private String dr; - private String dp; - private String dt; - private long time; + private final String clientId; + private final String userIp; + private final String userAgent; + private final String documentReferrer; + private final String documentPath; + private final String documentTitle; + private final long time; - GoogleAnalyticsEvent(String cid, String uip, String ua, String dr, String dp, String dt, long time) { - this.cid = cid; - this.uip = uip; - this.ua = ua; - this.dr = dr; - this.dp = dp; - this.dt = dt; - this.time = time; + public GoogleAnalyticsEvent(String clientId, String userIp, String userAgent, String documentReferrer, + String documentPath, String documentTitle) { + Assert.notNull(clientId, "A client id is required to create a Google Analytics event"); + this.clientId = clientId; + this.userIp = userIp; + this.userAgent = userAgent; + this.documentReferrer = documentReferrer; + this.documentPath = documentPath; + this.documentTitle = documentTitle; + this.time = System.currentTimeMillis(); } - /** - * Return Client ID - */ - public String getCid() { - return cid; + public String getClientId() { + return clientId; } - /** - * Set Client ID - */ - public void setCid(String cid) { - this.cid = cid; + public String getUserIp() { + return userIp; } - /** - * Return User IP - */ - public String getUip() { - return 
uip; + public String getUserAgent() { + return userAgent != null ? userAgent : ""; } - /** - * Set User IP - */ - public void setUip(String uip) { - this.uip = uip; + public String getDocumentReferrer() { + return documentReferrer != null ? documentReferrer : ""; } - /** - * Returns User Agent - */ - public String getUa() { - if (ua == null) { - return ""; - } else { - return ua; - } + public String getDocumentPath() { + return documentPath; } - /** - * Set User Agent - */ - public void setUa(String ua) { - this.ua = ua; + public String getDocumentTitle() { + return documentTitle; } - /** - * Return Document Referrer - */ - public String getDr() { - if (dr == null) { - return ""; - } else { - return dr; - } - } - - /** - * Set Document Referrer - */ - public void setDr(String dr) { - this.dr = dr; - } - - /** - * Return Document Path - */ - public String getDp() { - return dp; - } - - /** - * Set Document Path - */ - public void setDp(String dp) { - this.dp = dp; - } - - /** - * Return Document Title - */ - public String getDt() { - return dt; - } - - /** - * Set Document Title - */ - public void setDt(String dt) { - this.dt = dt; - } - - /** - * Return Time of event - */ public long getTime() { return time; } - /** - * Set Time of event - */ - public void setTime(long time) { - this.time = time; + @Override + public int hashCode() { + return Objects.hash(clientId, documentPath, documentReferrer, documentTitle, time, userAgent, userIp); } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + GoogleAnalyticsEvent other = (GoogleAnalyticsEvent) obj; + return Objects.equals(clientId, other.clientId) && Objects.equals(documentPath, other.documentPath) + && Objects.equals(documentReferrer, other.documentReferrer) + && Objects.equals(documentTitle, other.documentTitle) && time == other.time + && Objects.equals(userAgent, 
other.userAgent) && Objects.equals(userIp, other.userIp); + } + + @Override + public String toString() { + return "GoogleAnalyticsEvent [clientId=" + clientId + ", userIp=" + userIp + ", userAgent=" + userAgent + + ", documentReferrer=" + documentReferrer + ", documentPath=" + documentPath + ", documentTitle=" + + documentTitle + ", time=" + time + "]"; + } + } diff --git a/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java b/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java index cf5c40976d..c169e4712f 100644 --- a/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java +++ b/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java @@ -7,34 +7,25 @@ */ package org.dspace.google; -import java.io.IOException; -import java.net.URLEncoder; import java.sql.SQLException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.UUID; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; import javax.annotation.PostConstruct; -import javax.annotation.PreDestroy; +import javax.servlet.http.HttpServletRequest; -import com.google.common.base.Throwables; import org.apache.commons.collections.Buffer; import org.apache.commons.collections.BufferUtils; import org.apache.commons.collections.buffer.CircularFifoBuffer; import org.apache.commons.lang.StringUtils; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.entity.StringEntity; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.core.Constants; 
import org.dspace.core.Context; +import org.dspace.google.client.GoogleAnalyticsClient; import org.dspace.service.ClientInfoService; import org.dspace.services.ConfigurationService; import org.dspace.services.model.Event; @@ -42,96 +33,173 @@ import org.dspace.usage.AbstractUsageEventListener; import org.dspace.usage.UsageEvent; import org.springframework.beans.factory.annotation.Autowired; - /** * Notifies Google Analytics of Bitstream VIEW events. These events are stored in memory and then * asynchronously processed by a single seperate thread. * * @author April Herron + * @author Luca Giamminonni */ public class GoogleAsyncEventListener extends AbstractUsageEventListener { - private static final int MAX_TIME_SINCE_EVENT = 14400000; // 20 is the event max set by the GA API - private static final int GA_MAX_EVENTS = 20; - private static final String ANALYTICS_BATCH_ENDPOINT = "https://www.google-analytics.com/batch"; - private static Logger log = Logger.getLogger(GoogleAsyncEventListener.class); - private static String analyticsKey; - private static CloseableHttpClient httpclient; - private static Buffer buffer; - private static ExecutorService executor; - private static Future future; - private static boolean destroyed = false; + public static final int GA_MAX_EVENTS = 20; - @Autowired(required = true) - ConfigurationService configurationService; + private static final Logger LOGGER = LogManager.getLogger(); - @Autowired(required = true) - ClientInfoService clientInfoService; + private static final int MAX_TIME_SINCE_EVENT = 14400000; + + @Autowired + private ConfigurationService configurationService; + + @Autowired + private ClientInfoService clientInfoService; + + @Autowired + private List googleAnalyticsClients; + + private Buffer eventsBuffer; @PostConstruct public void init() { - analyticsKey = configurationService.getProperty("google.analytics.key"); - if (StringUtils.isNotEmpty(analyticsKey)) { - int analyticsBufferlimit = 
configurationService.getIntProperty("google.analytics.buffer.limit", 256); - buffer = BufferUtils.synchronizedBuffer(new CircularFifoBuffer(analyticsBufferlimit)); - httpclient = HttpClients.createDefault(); - executor = Executors.newSingleThreadExecutor(); - future = executor.submit(new GoogleAnalyticsTask()); - } + int analyticsBufferlimit = configurationService.getIntProperty("google.analytics.buffer.limit", 256); + eventsBuffer = BufferUtils.synchronizedBuffer(new CircularFifoBuffer(analyticsBufferlimit)); } @Override + @SuppressWarnings("unchecked") public void receiveEvent(Event event) { - if ((event instanceof UsageEvent)) { - if (StringUtils.isNotEmpty(analyticsKey)) { - UsageEvent ue = (UsageEvent) event; - log.debug("Usage event received " + event.getName()); - try { - if (ue.getAction() == UsageEvent.Action.VIEW && - ue.getObject().getType() == Constants.BITSTREAM) { - // Client ID, should uniquely identify the user or device. If we have an X-CORRELATION-ID - // header or a session ID for the user, then lets use it, othwerwise generate a UUID. - String cid; - if (ue.getRequest().getHeader("X-CORRELATION-ID") != null) { - cid = ue.getRequest().getHeader("X-CORRELATION-ID"); - } else if (ue.getRequest().getSession(false) != null) { - cid = ue.getRequest().getSession().getId(); - } else { - cid = UUID.randomUUID().toString(); - } - // Prefer the X-REFERRER header, otherwise falback to the referrer header - String referrer; - if (ue.getRequest().getHeader("X-REFERRER") != null) { - referrer = ue.getRequest().getHeader("X-REFERRER"); - } else { - referrer = ue.getRequest().getHeader("referer"); - } - buffer.add(new GoogleAnalyticsEvent(cid, clientInfoService.getClientIp(ue.getRequest()), - ue.getRequest().getHeader("USER-AGENT"), referrer, - ue.getRequest() .getRequestURI() + "?" 
+ ue.getRequest().getQueryString(), - getObjectName(ue), System.currentTimeMillis())); - } - } catch (Exception e) { - log.error("Failed to add event to buffer", e); - log.error("Event information: " + ue); - Context context = ue.getContext(); - if (context != null) { - log.error("Context information:"); - log.error(" Current User: " + context.getCurrentUser()); - log.error(" Extra log info: " + context.getExtraLogInfo()); - if (context.getEvents() != null && !context.getEvents().isEmpty()) { - for (int x = 1; x <= context.getEvents().size(); x++) { - log.error(" Context Event " + x + ": " + context.getEvents().get(x)); - } - } - } else { - log.error("UsageEvent has no Context object"); - } - } + if (!(event instanceof UsageEvent) || isGoogleAnalyticsKeyNotConfigured()) { + return; + } + + UsageEvent usageEvent = (UsageEvent) event; + LOGGER.debug("Usage event received " + event.getName()); + + if (isNotBitstreamViewEvent(usageEvent)) { + return; + } + + try { + GoogleAnalyticsEvent analyticsEvent = createGoogleAnalyticsEvent(usageEvent); + eventsBuffer.add(analyticsEvent); + } catch (Exception e) { + logReceiveEventException(usageEvent, e); + } + + } + + /** + * Send the collected events to Google Analytics. + */ + public void sendCollectedEvents() { + + if (isGoogleAnalyticsKeyNotConfigured()) { + return; + } + + String analyticsKey = getGoogleAnalyticsKey(); + + List events = getEventsFromBufferFilteredByEventTime(); + + if (events.isEmpty()) { + return; + } + + GoogleAnalyticsClient client = getClientByAnalyticsKey(analyticsKey); + + try { + client.sendEvents(analyticsKey, events); + } catch (RuntimeException ex) { + LOGGER.error("An error occurs sending the events.", ex); + } + + } + + /** + * Creates an instance of GoogleAnalyticsEvent from the given usage event. 
+ * @param usageEvent the usage event + * @return the Google Analytics event instance + */ + private GoogleAnalyticsEvent createGoogleAnalyticsEvent(UsageEvent usageEvent) { + + HttpServletRequest request = usageEvent.getRequest(); + + String clientId = getClientId(usageEvent); + String referrer = getReferrer(usageEvent); + String clientIp = clientInfoService.getClientIp(request); + String userAgent = request.getHeader("USER-AGENT"); + String documentPath = getDocumentPath(request); + String documentName = getObjectName(usageEvent); + + return new GoogleAnalyticsEvent(clientId, clientIp, userAgent, referrer, + documentPath, documentName); + } + + /** + * Client ID, should uniquely identify the user or device. If we have an + * X-CORRELATION-ID header or a session ID for the user, then lets use it, + * othwerwise generate a UUID. + */ + private String getClientId(UsageEvent usageEvent) { + if (usageEvent.getRequest().getHeader("X-CORRELATION-ID") != null) { + return usageEvent.getRequest().getHeader("X-CORRELATION-ID"); + } else if (usageEvent.getRequest().getSession(false) != null) { + return usageEvent.getRequest().getSession().getId(); + } else { + return UUID.randomUUID().toString(); + } + } + + /** + * Prefer the X-REFERRER header, otherwise fallback to the referrer header. + */ + private String getReferrer(UsageEvent usageEvent) { + if (usageEvent.getRequest().getHeader("X-REFERRER") != null) { + return usageEvent.getRequest().getHeader("X-REFERRER"); + } else { + return usageEvent.getRequest().getHeader("referer"); + } + } + + private String getDocumentPath(HttpServletRequest request) { + String documentPath = request.getRequestURI(); + if (StringUtils.isNotBlank(request.getQueryString())) { + documentPath += "?" 
+ request.getQueryString(); + } + return documentPath; + } + + private boolean isNotBitstreamViewEvent(UsageEvent usageEvent) { + return usageEvent.getAction() != UsageEvent.Action.VIEW + || usageEvent.getObject().getType() != Constants.BITSTREAM; + } + + private boolean isGoogleAnalyticsKeyNotConfigured() { + return StringUtils.isBlank(getGoogleAnalyticsKey()); + } + + private void logReceiveEventException(UsageEvent usageEvent, Exception e) { + + LOGGER.error("Failed to add event to buffer", e); + LOGGER.error("Event information: " + usageEvent); + + Context context = usageEvent.getContext(); + if (context == null) { + LOGGER.error("UsageEvent has no Context object"); + return; + } + + LOGGER.error("Context information:"); + LOGGER.error(" Current User: " + context.getCurrentUser()); + LOGGER.error(" Extra log info: " + context.getExtraLogInfo()); + if (context.getEvents() != null && !context.getEvents().isEmpty()) { + for (int x = 1; x <= context.getEvents().size(); x++) { + LOGGER.error(" Context Event " + x + ": " + context.getEvents().get(x)); } } + } private String getObjectName(UsageEvent ue) { @@ -146,7 +214,7 @@ public class GoogleAsyncEventListener extends AbstractUsageEventListener { } } catch (SQLException e) { // This shouldn't merit interrupting the user's transaction so log the error and continue. 
- log.error("Error in Google Analytics recording - can't determine ParentObjectName for bitstream " + + LOGGER.error("Error in Google Analytics recording - can't determine ParentObjectName for bitstream " + ue.getObject().getID(), e); } @@ -154,78 +222,73 @@ public class GoogleAsyncEventListener extends AbstractUsageEventListener { } - @PreDestroy - public void destroy() throws InterruptedException { - destroyed = true; - if (StringUtils.isNotEmpty(analyticsKey)) { - future.cancel(true); - executor.shutdown(); - executor.awaitTermination(1, TimeUnit.SECONDS); - } - } + /** + * Returns the first GA_MAX_EVENTS stored in the eventsBuffer with a time minor + * that MAX_TIME_SINCE_EVENT. The found events are removed from the buffer. + * + * @return the events from the buffer + */ + private List getEventsFromBufferFilteredByEventTime() { - private static class GoogleAnalyticsTask implements Runnable { - public void run() { - while (!destroyed) { - try { - boolean sleep = false; - StringBuilder request = null; - List events = new ArrayList<>(); - Iterator iterator = buffer.iterator(); - for (int x = 0; x < GA_MAX_EVENTS && iterator.hasNext(); x++) { - GoogleAnalyticsEvent event = (GoogleAnalyticsEvent) iterator.next(); - events.add(event); - if ((System.currentTimeMillis() - event.getTime()) < MAX_TIME_SINCE_EVENT) { - String download = "v=1" + - "&tid=" + analyticsKey + - "&cid=" + event.getCid() + - "&t=event" + - "&uip=" + URLEncoder.encode(event.getUip(), "UTF-8") + - "&ua=" + URLEncoder.encode(event.getUa(), "UTF-8") + - "&dr=" + URLEncoder.encode(event.getDr(), "UTF-8") + - "&dp=" + URLEncoder.encode(event.getDp(), "UTF-8") + - "&dt=" + URLEncoder.encode(event.getDt(), "UTF-8") + - "&qt=" + (System.currentTimeMillis() - event.getTime()) + - "&ec=bitstream" + - "&ea=download" + - "&el=item"; - if (request == null) { - request = new StringBuilder(download); - } else { - request.append("\n").append(download); - } - } - } + List events = new ArrayList<>(); - if (request 
!= null) { - HttpPost httpPost = new HttpPost(ANALYTICS_BATCH_ENDPOINT); - httpPost.setEntity(new StringEntity(request.toString())); - try (final CloseableHttpResponse response2 = httpclient.execute(httpPost)) { - // I can't find a list of what are acceptable responses, - // so I log the response but take no action. - log.debug("Google Analytics response is " + response2.getStatusLine()); - // Cleanup processed events - buffer.removeAll(events); - } catch (IOException e) { - log.error("GA post failed", e); - } - } else { - sleep = true; - } + Iterator iterator = eventsBuffer.iterator(); - if (sleep) { - try { - Thread.sleep(60000); - } catch (InterruptedException e) { - log.debug("Interrupted; checking if we should stop"); - } - } - } catch (Throwable t) { - log.error("Unexpected error; aborting GA event recording", t); - Throwables.propagate(t); - } + while (iterator.hasNext() && events.size() < GA_MAX_EVENTS) { + + GoogleAnalyticsEvent event = (GoogleAnalyticsEvent) iterator.next(); + eventsBuffer.remove(event); + + if ((System.currentTimeMillis() - event.getTime()) < MAX_TIME_SINCE_EVENT) { + events.add(event); } - log.info("Stopping GA event recording"); + } + + return events; } + + /** + * Returns the first instance of the GoogleAnalyticsClient that supports the + * given analytics key. + * + * @param analyticsKey the analytics key. 
+ * @return the found client + * @throws IllegalStateException if no client is found for the given analytics + * key + */ + private GoogleAnalyticsClient getClientByAnalyticsKey(String analyticsKey) { + + List clients = googleAnalyticsClients.stream() + .filter(client -> client.isAnalyticsKeySupported(analyticsKey)) + .collect(Collectors.toList()); + + if (clients.isEmpty()) { + throw new IllegalStateException("No Google Analytics Client supports key " + analyticsKey); + } + + if (clients.size() > 1) { + throw new IllegalStateException("More than one Google Analytics Client supports key " + analyticsKey); + } + + return clients.get(0); + + } + + private String getGoogleAnalyticsKey() { + return configurationService.getProperty("google.analytics.key"); + } + + public List getGoogleAnalyticsClients() { + return googleAnalyticsClients; + } + + public void setGoogleAnalyticsClients(List googleAnalyticsClients) { + this.googleAnalyticsClients = googleAnalyticsClients; + } + + public Buffer getEventsBuffer() { + return eventsBuffer; + } + } diff --git a/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalytics4ClientRequestBuilder.java b/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalytics4ClientRequestBuilder.java new file mode 100644 index 0000000000..85f48d6108 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalytics4ClientRequestBuilder.java @@ -0,0 +1,247 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.google.client; + +import static java.util.stream.Collectors.groupingBy; +import static org.apache.commons.lang.StringUtils.startsWith; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonInclude; +import 
com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.StringUtils; +import org.dspace.google.GoogleAnalyticsEvent; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link GoogleAnalyticsClientRequestBuilder} that compose + * the request for Google Analytics 4 (GA4). + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class GoogleAnalytics4ClientRequestBuilder implements GoogleAnalyticsClientRequestBuilder { + + private final String endpointUrl; + + @Autowired + private ConfigurationService configurationService; + + private ObjectMapper objectMapper = new ObjectMapper(); + + public GoogleAnalytics4ClientRequestBuilder(String endpointUrl) { + this.endpointUrl = endpointUrl; + } + + @Override + public String getEndpointUrl(String analyticsKey) { + + if (!startsWith(analyticsKey, "G-")) { + throw new IllegalArgumentException("Only keys with G- prefix are supported"); + } + + String apiSecret = configurationService.getProperty("google.analytics.api-secret"); + if (StringUtils.isBlank(apiSecret)) { + throw new GoogleAnalyticsClientException("The API secret must be configured to sent GA4 events"); + } + + return endpointUrl + "?api_secret=" + apiSecret + "&measurement_id=" + analyticsKey; + + } + + @Override + public List composeRequestsBody(String analyticsKey, List events) { + + Map> eventsGroupedByClientId = groupByClientId(events); + + List requestsBody = new ArrayList(); + + for (String clientId : eventsGroupedByClientId.keySet()) { + String requestBody = composeRequestBody(clientId, eventsGroupedByClientId.get(clientId)); + requestsBody.add(requestBody); + } + + return requestsBody; + + } + + private Map> groupByClientId(List events) { + return 
events.stream() + .collect(groupingBy(GoogleAnalyticsEvent::getClientId)); + } + + private String composeRequestBody(String clientId, List events) { + + GoogleAnalytics4EventsVO eventsVo = new GoogleAnalytics4EventsVO(clientId); + + events.stream() + .map(GoogleAnalytics4EventVO::fromGoogleAnalyticsEvent) + .forEach(eventsVo::addEvent); + + return toJsonAsString(eventsVo); + + } + + private String toJsonAsString(GoogleAnalytics4EventsVO eventsVo) { + try { + return objectMapper.writeValueAsString(eventsVo); + } catch (JsonProcessingException e) { + throw new GoogleAnalyticsClientException(e); + } + } + + public void setConfigurationService(ConfigurationService configurationService) { + this.configurationService = configurationService; + } + + /** + * Class that models the json of the events to be write in the body of the GA request. + */ + public static class GoogleAnalytics4EventsVO { + + @JsonProperty("client_id") + private final String clientId; + + private final List events; + + public GoogleAnalytics4EventsVO(String clientId) { + this.clientId = clientId; + this.events = new ArrayList<>(); + } + + public String getClientId() { + return clientId; + } + + public List getEvents() { + return events; + } + + public void addEvent(GoogleAnalytics4EventVO event) { + this.events.add(event); + } + + } + + /** + * Class that model a single event to be sent to GA. 
+ */ + public static class GoogleAnalytics4EventVO { + + private final String name = "item"; + + private final GoogleAnalytics4EventParamsVO params; + + public static GoogleAnalytics4EventVO fromGoogleAnalyticsEvent(GoogleAnalyticsEvent event) { + return new GoogleAnalytics4EventVO(event.getTime(), event.getDocumentTitle(), event.getDocumentPath(), + event.getDocumentReferrer(), event.getUserAgent(), event.getUserIp()); + } + + public GoogleAnalytics4EventVO(long time, String documentTitle, String documentPath, String documentReferrer, + String userAgent, String userIp) { + + this.params = new GoogleAnalytics4EventParamsVO(time, documentTitle, documentPath, + documentReferrer, userAgent, userIp); + } + + public String getName() { + return name; + } + + public GoogleAnalytics4EventParamsVO getParams() { + return params; + } + + } + + /** + * Class that model the params of a specific event to be sent to GA. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ + public static class GoogleAnalytics4EventParamsVO { + + private final String action = "download"; + + private final String category = "bitstream"; + + @JsonInclude(Include.NON_NULL) + private final long time; + + @JsonInclude(Include.NON_NULL) + @JsonProperty("document_title") + private final String documentTitle; + + @JsonInclude(Include.NON_NULL) + @JsonProperty("document_path") + private final String documentPath; + + @JsonInclude(Include.NON_NULL) + @JsonProperty("document_referrer") + private final String documentReferrer; + + @JsonInclude(Include.NON_NULL) + @JsonProperty("user_agent") + private final String userAgent; + + @JsonInclude(Include.NON_NULL) + @JsonProperty("user_ip") + private final String userIp; + + public GoogleAnalytics4EventParamsVO(long time, String documentTitle, String documentPath, + String documentReferrer, String userAgent, String userIp) { + this.time = time; + this.documentTitle = documentTitle; + this.documentPath = documentPath; + this.documentReferrer = 
documentReferrer; + this.userAgent = userAgent; + this.userIp = userIp; + } + + public long getTime() { + return time; + } + + public String getDocumentTitle() { + return documentTitle; + } + + public String getDocumentPath() { + return documentPath; + } + + public String getDocumentReferrer() { + return documentReferrer; + } + + public String getUserAgent() { + return userAgent; + } + + public String getUserIp() { + return userIp; + } + + public String getAction() { + return action; + } + + public String getCategory() { + return category; + } + + } + +} diff --git a/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClient.java b/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClient.java new file mode 100644 index 0000000000..80f64aa534 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClient.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.google.client; + +import java.util.List; + +import org.dspace.google.GoogleAnalyticsEvent; + +/** + * Client to send events to Google Analytics. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface GoogleAnalyticsClient { + + /** + * Check if the client supports the given analytics key. + * + * @param analyticsKey the analytics key + * @return true if the key is supported, false otherwise + */ + boolean isAnalyticsKeySupported(String analyticsKey); + + /** + * Send the given Google Analytics events. 
+ * + * @param analyticsKey the analytics key + * @param events the events to be sent + */ + void sendEvents(String analyticsKey, List events); +} diff --git a/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClientException.java b/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClientException.java new file mode 100644 index 0000000000..a762deed34 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClientException.java @@ -0,0 +1,32 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.google.client; + +/** + * Exception thrown by {@link GoogleAnalyticsClient} during the events sending. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class GoogleAnalyticsClientException extends RuntimeException { + + private static final long serialVersionUID = -2248100136404696572L; + + public GoogleAnalyticsClientException(String message, Throwable cause) { + super(message, cause); + } + + public GoogleAnalyticsClientException(String message) { + super(message); + } + + public GoogleAnalyticsClientException(Throwable cause) { + super(cause); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClientImpl.java b/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClientImpl.java new file mode 100644 index 0000000000..b5ee1806cd --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClientImpl.java @@ -0,0 +1,119 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.google.client; + +import java.io.IOException; +import 
java.nio.charset.Charset; +import java.util.List; + +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.http.HttpResponse; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import org.dspace.google.GoogleAnalyticsEvent; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Implementation of {@link GoogleAnalyticsClient}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class GoogleAnalyticsClientImpl implements GoogleAnalyticsClient { + + private static final Logger LOGGER = LoggerFactory.getLogger(GoogleAnalyticsClientImpl.class); + + private final String keyPrefix; + + private final GoogleAnalyticsClientRequestBuilder requestBuilder; + + private final CloseableHttpClient httpclient; + + public GoogleAnalyticsClientImpl(String keyPrefix, GoogleAnalyticsClientRequestBuilder requestBuilder) { + this.keyPrefix = keyPrefix; + this.requestBuilder = requestBuilder; + this.httpclient = HttpClients.createDefault(); + } + + @Override + public boolean isAnalyticsKeySupported(String analyticsKey) { + return StringUtils.startsWith(analyticsKey, keyPrefix); + } + + @Override + public void sendEvents(String analyticsKey, List events) { + + if (!isAnalyticsKeySupported(analyticsKey)) { + throw new IllegalArgumentException("The given analytics key " + analyticsKey + + " is not supported. 
A key with prefix " + keyPrefix + " is required"); + } + + String endpointUrl = requestBuilder.getEndpointUrl(analyticsKey); + + requestBuilder.composeRequestsBody(analyticsKey, events) + .forEach(requestBody -> sendRequest(endpointUrl, requestBody)); + + } + + private void sendRequest(String endpointUrl, String requestBody) { + + try { + + HttpPost httpPost = new HttpPost(endpointUrl); + httpPost.setEntity(new StringEntity(requestBody)); + + try (CloseableHttpResponse response = httpclient.execute(httpPost)) { + if (isNotSuccessfull(response)) { + throw new GoogleAnalyticsClientException(formatErrorMessage(response)); + } + } + + } catch (GoogleAnalyticsClientException ex) { + throw ex; + } catch (Exception ex) { + throw new GoogleAnalyticsClientException("An error occurs sending events to " + endpointUrl, ex); + } + + } + + private boolean isNotSuccessfull(HttpResponse response) { + int statusCode = getStatusCode(response); + return statusCode < 200 || statusCode > 299; + } + + private int getStatusCode(HttpResponse response) { + return response.getStatusLine().getStatusCode(); + } + + private String formatErrorMessage(HttpResponse response) { + return "Status " + getStatusCode(response) + ". 
Content: " + getResponseContent(response); + } + + private String getResponseContent(HttpResponse response) { + try { + return IOUtils.toString(response.getEntity().getContent(), Charset.defaultCharset()); + } catch (UnsupportedOperationException | IOException e) { + LOGGER.error("An error occurs getting the response content", e); + return "Generic error"; + } + } + + public String getKeyPrefix() { + return keyPrefix; + } + + public GoogleAnalyticsClientRequestBuilder getRequestBuilder() { + return requestBuilder; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClientRequestBuilder.java b/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClientRequestBuilder.java new file mode 100644 index 0000000000..f45eddce4c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClientRequestBuilder.java @@ -0,0 +1,40 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.google.client; + +import java.util.List; + +import org.dspace.google.GoogleAnalyticsEvent; + +/** + * Interface for classes used by {@link GoogleAnalyticsClient} to define the url + * and the body of the requests to be sent to Google Analytics. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface GoogleAnalyticsClientRequestBuilder { + + /** + * Returns the url of the Google Analytics endpoint. + * + * @param analyticsKey the Google Analytics key + * @return the endpoint url + */ + String getEndpointUrl(String analyticsKey); + + /** + * Returns the body of the requests to be sent to Google Analytics as string, + * based on the given analytics key and events. 
+ * + * @param analyticsKey the Google Analytics key + * @param events the events to be sent + * @return the requests body as string + */ + List composeRequestsBody(String analyticsKey, List events); +} diff --git a/dspace-api/src/main/java/org/dspace/google/client/UniversalAnalyticsClientRequestBuilder.java b/dspace-api/src/main/java/org/dspace/google/client/UniversalAnalyticsClientRequestBuilder.java new file mode 100644 index 0000000000..274c27957e --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/google/client/UniversalAnalyticsClientRequestBuilder.java @@ -0,0 +1,74 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.google.client; + +import static org.apache.commons.lang.StringUtils.startsWith; +import static org.apache.commons.lang3.StringUtils.isNotEmpty; + +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.stream.Collectors; + +import org.dspace.google.GoogleAnalyticsEvent; + +/** + * Implementation of {@link GoogleAnalyticsClientRequestBuilder} that compose + * the request for Universal Analytics (UA). 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class UniversalAnalyticsClientRequestBuilder implements GoogleAnalyticsClientRequestBuilder { + + private final String endpointUrl; + + public UniversalAnalyticsClientRequestBuilder(String endpointUrl) { + this.endpointUrl = endpointUrl; + } + + @Override + public String getEndpointUrl(String analyticsKey) { + return endpointUrl; + } + + @Override + public List composeRequestsBody(String analyticsKey, List events) { + + if (!startsWith(analyticsKey, "UA-")) { + throw new IllegalArgumentException("Only keys with UA- prefix are supported"); + } + + String requestBody = events.stream() + .map(event -> formatEvent(analyticsKey, event)) + .collect(Collectors.joining("\n")); + + return isNotEmpty(requestBody) ? List.of(requestBody) : List.of(); + } + + private String formatEvent(String analyticsKey, GoogleAnalyticsEvent event) { + return "v=1" + + "&tid=" + analyticsKey + + "&cid=" + event.getClientId() + + "&t=event" + + "&uip=" + encodeParameter(event.getUserIp()) + + "&ua=" + encodeParameter(event.getUserAgent()) + + "&dr=" + encodeParameter(event.getDocumentReferrer()) + + "&dp=" + encodeParameter(event.getDocumentPath()) + + "&dt=" + encodeParameter(event.getDocumentTitle()) + + "&qt=" + (System.currentTimeMillis() - event.getTime()) + + "&ec=bitstream" + + "&ea=download" + + "&el=item"; + } + + private String encodeParameter(String parameter) { + return URLEncoder.encode(parameter, StandardCharsets.UTF_8); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/handle/HandleServiceImpl.java b/dspace-api/src/main/java/org/dspace/handle/HandleServiceImpl.java index c417aa4794..aa730fe2b1 100644 --- a/dspace-api/src/main/java/org/dspace/handle/HandleServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/handle/HandleServiceImpl.java @@ -9,6 +9,7 @@ package org.dspace.handle; import java.sql.SQLException; import java.util.ArrayList; +import java.util.Iterator; import 
java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -211,17 +212,17 @@ public class HandleServiceImpl implements HandleService { @Override public void unbindHandle(Context context, DSpaceObject dso) throws SQLException { - List handles = getInternalHandles(context, dso); - if (CollectionUtils.isNotEmpty(handles)) { - for (Handle handle : handles) { + Iterator handles = dso.getHandles().iterator(); + if (handles.hasNext()) { + while (handles.hasNext()) { + final Handle handle = handles.next(); + handles.remove(); //Only set the "resouce_id" column to null when unbinding a handle. // We want to keep around the "resource_type_id" value, so that we // can verify during a restore whether the same *type* of resource // is reusing this handle! handle.setDSpaceObject(null); - //Also remove the handle from the DSO list to keep a consistent model - dso.getHandles().remove(handle); handleDAO.save(context, handle); @@ -256,7 +257,7 @@ public class HandleServiceImpl implements HandleService { @Override public String findHandle(Context context, DSpaceObject dso) throws SQLException { - List handles = getInternalHandles(context, dso); + List handles = dso.getHandles(); if (CollectionUtils.isEmpty(handles)) { return null; } else { @@ -328,20 +329,6 @@ public class HandleServiceImpl implements HandleService { //////////////////////////////////////// // Internal methods //////////////////////////////////////// - - /** - * Return the handle for an Object, or null if the Object has no handle. - * - * @param context DSpace context - * @param dso DSpaceObject for which we require our handles - * @return The handle for object, or null if the object has no handle. - * @throws SQLException If a database error occurs - */ - protected List getInternalHandles(Context context, DSpaceObject dso) - throws SQLException { - return handleDAO.getHandlesByDSpaceObject(context, dso); - } - /** * Find the database row corresponding to handle. 
* @@ -405,7 +392,7 @@ public class HandleServiceImpl implements HandleService { } // Check additional prefixes supported in the config file - String[] additionalPrefixes = configurationService.getArrayProperty("handle.additional.prefixes"); + String[] additionalPrefixes = getAdditionalPrefixes(); for (String additionalPrefix : additionalPrefixes) { if (identifier.startsWith(additionalPrefix + "/")) { // prefix is the equivalent of 123456789 in 123456789/???; don't strip @@ -415,4 +402,9 @@ public class HandleServiceImpl implements HandleService { return null; } + + @Override + public String[] getAdditionalPrefixes() { + return configurationService.getArrayProperty("handle.additional.prefixes"); + } } diff --git a/dspace-api/src/main/java/org/dspace/handle/UpdateHandlePrefix.java b/dspace-api/src/main/java/org/dspace/handle/UpdateHandlePrefix.java index 133d3dbc2c..7fb03376eb 100644 --- a/dspace-api/src/main/java/org/dspace/handle/UpdateHandlePrefix.java +++ b/dspace-api/src/main/java/org/dspace/handle/UpdateHandlePrefix.java @@ -126,7 +126,7 @@ public class UpdateHandlePrefix { ); } catch (SQLException sqle) { - if ((context != null) && (context.isValid())) { + if (context.isValid()) { context.abort(); context = null; } diff --git a/dspace-api/src/main/java/org/dspace/handle/hdlresolver/HdlResolverDTO.java b/dspace-api/src/main/java/org/dspace/handle/hdlresolver/HdlResolverDTO.java new file mode 100644 index 0000000000..fe50bba813 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/handle/hdlresolver/HdlResolverDTO.java @@ -0,0 +1,87 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.handle.hdlresolver; + +import java.io.UnsupportedEncodingException; +import java.net.URLDecoder; +import java.util.Objects; + +import org.apache.commons.lang3.Validate; +import 
org.dspace.core.Constants; + +/** + * Maps the URL of the request to a handle identifier + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.it) + * + */ +public class HdlResolverDTO { + + private final String[] splittedString; + private final String handle; + + /** + * Decode a given URL + * @param url URL + * @return decoded URL + */ + private static String decode(String url) { + try { + return URLDecoder.decode(url, Constants.DEFAULT_ENCODING); + } catch (UnsupportedEncodingException e) { + return url; + } + } + + /** + * Default Constructor + * + * @param requestURL is the complete Request URL + * @param resolverSubPath is the rest service Sub-path + */ + public HdlResolverDTO(final String requestURL, final String resolverSubPath) { + Validate.notBlank(requestURL, "RequestURI not specified"); + Validate.notBlank(resolverSubPath, "fullPath not specified"); + this.splittedString = requestURL.split(resolverSubPath); + if (Objects.nonNull(splittedString) && splittedString.length > 1) { + // Decodes the URL-encoded characters of the String + this.handle = decode(splittedString[1]); + } else { + this.handle = null; + } + } + + /** + * Returns the split segments of the resource path + * + * @return the segments of the request URL split on the resolver sub-path + */ + public final String[] getSplittedString() { + return this.splittedString; + } + + /** + * Returns the handle identifier + * + * @return the handle identifier, or null if none could be extracted + */ + public final String getHandle() { + return this.handle; + } + + /** + * Checks if the handle identifier is valid. 
+ * + * @return true if the handle is non-null, non-blank and not the literal string "null", false otherwise + */ + public boolean isValid() { + return Objects.nonNull(this.handle) && + !"null".equalsIgnoreCase(this.handle) && + !this.handle.trim().isEmpty(); + } +} diff --git a/dspace-api/src/main/java/org/dspace/handle/hdlresolver/HdlResolverService.java b/dspace-api/src/main/java/org/dspace/handle/hdlresolver/HdlResolverService.java new file mode 100644 index 0000000000..3beca5f5dd --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/handle/hdlresolver/HdlResolverService.java @@ -0,0 +1,65 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.handle.hdlresolver; + +import java.util.List; + +import org.dspace.core.Context; + +/** + * Service used for utilities involving {@code HdlResolverDTO} and its + * resolution to handle URI and vice-versa. + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.it) + * + */ +public interface HdlResolverService { + + /** + * Method that creates an HdlResolverDTO using the requestURI (full + * requested handle URI) and the path (REST handler URI) + * + * @param requestURI full requested handle URI + * @param path REST handler URI + * @return HdlResolverDTO + */ + HdlResolverDTO resolveBy(String requestURI, String path); + + /** + * Converts the hdlResolver into URL fetching it from repository using the DSpace context + * + * @param context DSpace context + * @param hdlResolver handle resolver DTO + * @return URL found or null + */ + String resolveToURL(Context context, HdlResolverDTO hdlResolver); + + /** + * List all available prefixes for this installation + * + * @return `List` of Handle prefixes + */ + List listPrefixes(); + + /** + * List all available handles with `prefix` + * + * @param context DSpace context + * @param prefix prefix to search + * @return `List` of handles + */ + 
List listHandles(Context context, String prefix); + + /** + * Verifies status of handle controller + * + * @return `true` if enabled, `false` otherwise + */ + boolean isListhandlesEnabled(); + +} diff --git a/dspace-api/src/main/java/org/dspace/handle/hdlresolver/HdlResolverServiceImpl.java b/dspace-api/src/main/java/org/dspace/handle/hdlresolver/HdlResolverServiceImpl.java new file mode 100644 index 0000000000..3607777322 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/handle/hdlresolver/HdlResolverServiceImpl.java @@ -0,0 +1,87 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.handle.hdlresolver; + +import java.sql.SQLException; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.core.Context; +import org.dspace.handle.service.HandleService; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +/** + * + * Handle Resolver that uses an HandleService to retrieve the right + * URL of a target Handle. 
+ * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.it) + * + */ +@Service +public class HdlResolverServiceImpl implements HdlResolverService { + + public static final String LISTHANDLES_HIDE_PROP = "handle.hide.listhandles"; + + private static final Logger log = LogManager.getLogger(); + + @Autowired(required = true) + private HandleService handleService; + + @Autowired(required = true) + private ConfigurationService configurationService; + + @Override + public HdlResolverDTO resolveBy(String requestURI, String path) { + return new HdlResolverDTO(requestURI, path); + } + + @Override + public String resolveToURL(Context context, HdlResolverDTO hdlResolver) { + try { + return this.handleService.resolveToURL(context, hdlResolver.getHandle()); + } catch (SQLException e) { + log.error("Error while resolving Handle: " + hdlResolver.getHandle(), e); + throw new RuntimeException("Error while resolving Handle: " + hdlResolver.getHandle(), e); + } + } + + @Override + public List listPrefixes() { + return Stream.concat( + Stream.of(this.handleService.getAdditionalPrefixes()), + Stream.of(this.handleService.getPrefix()) + ) + .filter(StringUtils::isNotBlank) + .collect(Collectors.toList()); + } + + @Override + public List listHandles(Context context, String prefix) { + List handlesForPrefix = List.of(); + try { + handlesForPrefix = this.handleService.getHandlesForPrefix(context, prefix); + } catch (SQLException e) { + log.error("Error while listing handles for prefix: " + prefix, e); + throw new RuntimeException("Error while listing handles for prefix: " + prefix, e); + } + return handlesForPrefix; + } + + @Override + public boolean isListhandlesEnabled() { + return !this.configurationService.getBooleanProperty(LISTHANDLES_HIDE_PROP); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/handle/service/HandleService.java b/dspace-api/src/main/java/org/dspace/handle/service/HandleService.java index c7de7411ef..85950ab6db 100644 --- 
a/dspace-api/src/main/java/org/dspace/handle/service/HandleService.java +++ b/dspace-api/src/main/java/org/dspace/handle/service/HandleService.java @@ -101,16 +101,18 @@ public interface HandleService { throws SQLException, IllegalStateException; /** - * Creates a handle entry, but with a handle supplied by the caller (new - * Handle not generated) + * Creates a handle entry, but with a handle supplied by the caller (new Handle + * not generated) * * @param context DSpace context * @param dso DSpaceObject * @param suppliedHandle existing handle value * @param force FIXME: currently unused * @return the Handle - * @throws SQLException An exception that provides information on a database access error or other errors. - * @throws IllegalStateException if specified handle is already in use by another object + * @throws SQLException An exception that provides information on a + * database access error or other errors. + * @throws IllegalStateException if specified handle is already in use by + * another object */ public String createHandle(Context context, DSpaceObject dso, String suppliedHandle, boolean force) throws SQLException, IllegalStateException; @@ -190,4 +192,12 @@ public interface HandleService { * @return */ String parseHandle(String identifier); + + /** + * Gets the additional prefixes used for handles, + * mapped in configuration file. 
+ * + * @return `String[]` array of prefixes + */ + String[] getAdditionalPrefixes(); } diff --git a/dspace-api/src/main/java/org/dspace/harvest/HarvestedCollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/harvest/HarvestedCollectionServiceImpl.java index 88cec74a58..0ad83a3292 100644 --- a/dspace-api/src/main/java/org/dspace/harvest/HarvestedCollectionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/harvest/HarvestedCollectionServiceImpl.java @@ -19,20 +19,20 @@ import java.util.Calendar; import java.util.Date; import java.util.List; import javax.xml.parsers.ParserConfigurationException; -import javax.xml.transform.TransformerException; +import javax.xml.xpath.XPathExpressionException; -import ORG.oclc.oai.harvester2.verb.Identify; -import ORG.oclc.oai.harvester2.verb.ListIdentifiers; import org.dspace.content.Collection; import org.dspace.core.Context; import org.dspace.harvest.dao.HarvestedCollectionDAO; import org.dspace.harvest.service.HarvestedCollectionService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.Namespace; -import org.jdom.input.DOMBuilder; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.input.DOMBuilder; +import org.oclc.oai.harvester2.verb.Identify; +import org.oclc.oai.harvester2.verb.ListIdentifiers; import org.springframework.beans.factory.annotation.Autowired; import org.w3c.dom.DOMException; import org.xml.sax.SAXException; @@ -198,7 +198,7 @@ public class HarvestedCollectionServiceImpl implements HarvestedCollectionServic // First, see if we can contact the target server at all. 
try { new Identify(oaiSource); - } catch (IOException | ParserConfigurationException | TransformerException | SAXException ex) { + } catch (IOException | ParserConfigurationException | XPathExpressionException | SAXException ex) { errorSet.add(OAI_ADDRESS_ERROR + ": OAI server could not be reached."); return errorSet; } @@ -216,7 +216,7 @@ public class HarvestedCollectionServiceImpl implements HarvestedCollectionServic try { OREOAIPrefix = OAIHarvester.oaiResolveNamespaceToPrefix(oaiSource, OAIHarvester.getORENamespace().getURI()); DMDOAIPrefix = OAIHarvester.oaiResolveNamespaceToPrefix(oaiSource, DMD_NS.getURI()); - } catch (IOException | ParserConfigurationException | TransformerException | SAXException ex) { + } catch (IOException | ParserConfigurationException | XPathExpressionException | SAXException ex) { errorSet.add(OAI_ADDRESS_ERROR + ": OAI did not respond to ListMetadataFormats query (" + ORE_NS.getPrefix() + ":" + OREOAIPrefix + " ; " @@ -260,7 +260,8 @@ public class HarvestedCollectionServiceImpl implements HarvestedCollectionServic } } } - } catch (IOException | ParserConfigurationException | TransformerException | DOMException | SAXException e) { + } catch (IOException | ParserConfigurationException | XPathExpressionException | DOMException | + SAXException e) { errorSet.add(OAI_ADDRESS_ERROR + ": OAI server could not be reached"); return errorSet; } catch (RuntimeException re) { diff --git a/dspace-api/src/main/java/org/dspace/harvest/OAIHarvester.java b/dspace-api/src/main/java/org/dspace/harvest/OAIHarvester.java index 71e00d73d7..5aeb40bdd9 100644 --- a/dspace-api/src/main/java/org/dspace/harvest/OAIHarvester.java +++ b/dspace-api/src/main/java/org/dspace/harvest/OAIHarvester.java @@ -28,13 +28,10 @@ import java.util.Map; import java.util.Set; import java.util.TimeZone; import javax.xml.parsers.ParserConfigurationException; -import javax.xml.transform.TransformerException; +import javax.xml.xpath.XPathExpressionException; -import 
ORG.oclc.oai.harvester2.verb.GetRecord; -import ORG.oclc.oai.harvester2.verb.Identify; -import ORG.oclc.oai.harvester2.verb.ListMetadataFormats; -import ORG.oclc.oai.harvester2.verb.ListRecords; import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Bitstream; @@ -70,11 +67,15 @@ import org.dspace.harvest.service.HarvestedCollectionService; import org.dspace.harvest.service.HarvestedItemService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.Namespace; -import org.jdom.input.DOMBuilder; -import org.jdom.output.XMLOutputter; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.input.DOMBuilder; +import org.jdom2.output.XMLOutputter; +import org.oclc.oai.harvester2.verb.GetRecord; +import org.oclc.oai.harvester2.verb.Identify; +import org.oclc.oai.harvester2.verb.ListMetadataFormats; +import org.oclc.oai.harvester2.verb.ListRecords; import org.xml.sax.SAXException; @@ -91,7 +92,7 @@ public class OAIHarvester { /** * log4j category */ - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(OAIHarvester.class); + private static final Logger log = LogManager.getLogger(); private static final Namespace ATOM_NS = Namespace.getNamespace("http://www.w3.org/2005/Atom"); private static final Namespace ORE_NS = Namespace.getNamespace("http://www.openarchives.org/ore/terms/"); @@ -133,7 +134,7 @@ public class OAIHarvester { private String metadataKey; // DOMbuilder class for the DOM -> JDOM conversions - private static DOMBuilder db = new DOMBuilder(); + private static final DOMBuilder db = new DOMBuilder(); // The point at which this thread should terminate itself /* Initialize the harvester with a collection object */ @@ -331,18 
+332,16 @@ public class OAIHarvester { // main loop to keep requesting more objects until we're done List records; - Set errorSet = new HashSet(); + Set errorSet = new HashSet<>(); ListRecords listRecords = new ListRecords(oaiSource, fromDate, toDate, oaiSetId, descMDPrefix); log.debug( "Harvesting request parameters: listRecords " + oaiSource + " " + fromDate + " " + toDate + " " + oaiSetId + " " + descMDPrefix); - if (listRecords != null) { - log.info("HTTP Request: " + listRecords.getRequestURL()); - } + log.info("HTTP Request: " + listRecords.getRequestURL()); while (listRecords != null) { - records = new ArrayList(); + records = new ArrayList<>(); oaiResponse = db.build(listRecords.getDocument()); if (listRecords.getErrors() != null && listRecords.getErrors().getLength() > 0) { @@ -376,8 +375,8 @@ public class OAIHarvester { } // Process the obtained records - if (records != null && records.size() > 0) { - log.info("Found " + records.size() + " records to process"); + if (!records.isEmpty()) { + log.info("Found {} records to process", records::size); for (Element record : records) { // check for STOP interrupt from the scheduler if (HarvestScheduler.getInterrupt() == HarvestScheduler.HARVESTER_INTERRUPT_STOP) { @@ -439,7 +438,8 @@ public class OAIHarvester { harvestRow.setHarvestStatus(HarvestedCollection.STATUS_UNKNOWN_ERROR); harvestedCollectionService.update(ourContext, harvestRow); alertAdmin(HarvestedCollection.STATUS_UNKNOWN_ERROR, ex); - log.error("Error occurred while generating an OAI response: " + ex.getMessage() + " " + ex.getCause(), ex); + log.error("Error occurred while generating an OAI response: {} {}", + ex.getMessage(), ex.getCause(), ex); ourContext.complete(); return; } finally { @@ -455,6 +455,7 @@ public class OAIHarvester { harvestRow.setHarvestStartTime(startTime); harvestRow.setHarvestMessage("Harvest from " + oaiSource + " successful"); harvestRow.setHarvestStatus(HarvestedCollection.STATUS_READY); + 
harvestRow.setLastHarvested(startTime); log.info( "Harvest from " + oaiSource + " successful. The process took " + timeTaken + " milliseconds. Harvested " + currentRecord + " items."); @@ -493,11 +494,11 @@ public class OAIHarvester { * @throws HarvestingException if harvesting error * @throws ParserConfigurationException XML parsing error * @throws SAXException if XML processing error - * @throws TransformerException if XML transformer error + * @throws XPathExpressionException if XPath error */ protected void processRecord(Element record, String OREPrefix, final long currentRecord, long totalListSize) throws SQLException, AuthorizeException, IOException, CrosswalkException, HarvestingException, - ParserConfigurationException, SAXException, TransformerException { + ParserConfigurationException, SAXException, XPathExpressionException { WorkspaceItem wi = null; Date timeStart = new Date(); @@ -567,11 +568,7 @@ public class OAIHarvester { // Import the actual bitstreams if (harvestRow.getHarvestType() == 3) { log.info("Running ORE ingest on: " + item.getHandle()); - - List allBundles = item.getBundles(); - for (Bundle bundle : allBundles) { - itemService.removeBundle(ourContext, item, bundle); - } + itemService.removeAllBundles(ourContext, item); ORExwalk.ingest(ourContext, item, oreREM, true); } } else { @@ -623,7 +620,7 @@ public class OAIHarvester { List OREBundles = itemService.getBundles(item, "ORE"); Bitstream OREBitstream = null; - if (OREBundles.size() > 0) { + if (!OREBundles.isEmpty()) { OREBundle = OREBundles.get(0); } else { OREBundle = bundleService.create(ourContext, item, "ORE"); @@ -698,7 +695,7 @@ public class OAIHarvester { List values = itemService.getMetadata(item, "dc", "identifier", Item.ANY, Item.ANY); - if (values.size() > 0 && acceptedHandleServers != null) { + if (!values.isEmpty() && acceptedHandleServers != null) { for (MetadataValue value : values) { // 0 1 2 3 4 // https://hdl.handle.net/1234/12 @@ -732,7 +729,7 @@ public class 
OAIHarvester { * @return a string in the format 'yyyy-mm-ddThh:mm:ssZ' and converted to UTC timezone */ private String processDate(Date date) { - Integer timePad = configurationService.getIntProperty("oai.harvester.timePadding"); + int timePad = configurationService.getIntProperty("oai.harvester.timePadding"); if (timePad == 0) { timePad = 120; @@ -769,10 +766,10 @@ public class OAIHarvester { * @throws IOException if IO error * @throws SAXException if XML processing error * @throws ParserConfigurationException XML parsing error - * @throws TransformerException if XML transformer error + * @throws XPathExpressionException if XPath error */ private String oaiGetDateGranularity(String oaiSource) - throws IOException, ParserConfigurationException, SAXException, TransformerException { + throws IOException, ParserConfigurationException, SAXException, XPathExpressionException { Identify iden = new Identify(oaiSource); return iden.getDocument().getElementsByTagNameNS(OAI_NS.getURI(), "granularity").item(0).getTextContent(); } @@ -789,26 +786,24 @@ public class OAIHarvester { * operations. 
* @throws ParserConfigurationException XML parsing error * @throws SAXException if XML processing error - * @throws TransformerException if XML transformer error + * @throws XPathExpressionException if XPath error * @throws ConnectException if could not connect to OAI server */ public static String oaiResolveNamespaceToPrefix(String oaiSource, String MDNamespace) - throws IOException, ParserConfigurationException, SAXException, TransformerException, ConnectException { + throws IOException, ParserConfigurationException, SAXException, XPathExpressionException, ConnectException { String metaPrefix = null; // Query the OAI server for the metadata ListMetadataFormats lmf = new ListMetadataFormats(oaiSource); - if (lmf != null) { - Document lmfResponse = db.build(lmf.getDocument()); - List mdFormats = lmfResponse.getRootElement().getChild("ListMetadataFormats", OAI_NS) - .getChildren("metadataFormat", OAI_NS); + Document lmfResponse = db.build(lmf.getDocument()); + List mdFormats = lmfResponse.getRootElement().getChild("ListMetadataFormats", OAI_NS) + .getChildren("metadataFormat", OAI_NS); - for (Element mdFormat : mdFormats) { - if (MDNamespace.equals(mdFormat.getChildText("metadataNamespace", OAI_NS))) { - metaPrefix = mdFormat.getChildText("metadataPrefix", OAI_NS); - break; - } + for (Element mdFormat : mdFormats) { + if (MDNamespace.equals(mdFormat.getChildText("metadataNamespace", OAI_NS))) { + metaPrefix = mdFormat.getChildText("metadataPrefix", OAI_NS); + break; } } @@ -868,15 +863,15 @@ public class OAIHarvester { * operations. 
* @throws ParserConfigurationException XML parsing error * @throws SAXException if XML processing error - * @throws TransformerException if XML transformer error + * @throws XPathExpressionException if XPath error * @throws HarvestingException if harvesting error */ protected List getMDrecord(String oaiSource, String itemOaiId, String metadataPrefix) - throws IOException, ParserConfigurationException, SAXException, TransformerException, HarvestingException { + throws IOException, ParserConfigurationException, SAXException, XPathExpressionException, HarvestingException { GetRecord getRecord = new GetRecord(oaiSource, itemOaiId, metadataPrefix); - Set errorSet = new HashSet(); + Set errorSet = new HashSet<>(); // If the metadata is not available for this item, can the whole thing - if (getRecord != null && getRecord.getErrors() != null && getRecord.getErrors().getLength() > 0) { + if (getRecord.getErrors() != null && getRecord.getErrors().getLength() > 0) { for (int i = 0; i < getRecord.getErrors().getLength(); i++) { String errorCode = getRecord.getErrors().item(i).getAttributes().getNamedItem("code").getTextContent(); errorSet.add(errorCode); diff --git a/dspace-api/src/main/java/org/dspace/health/EmbargoCheck.java b/dspace-api/src/main/java/org/dspace/health/EmbargoCheck.java index 5577f41e66..e7b456f7b3 100644 --- a/dspace-api/src/main/java/org/dspace/health/EmbargoCheck.java +++ b/dspace-api/src/main/java/org/dspace/health/EmbargoCheck.java @@ -26,9 +26,8 @@ public class EmbargoCheck extends Check { @Override public String run(ReportInfo ri) { String ret = ""; - Context context = null; + Context context = new Context(); try { - context = new Context(); Iterator item_iter = null; try { item_iter = embargoService.findItemsByLiftMetadata(context); @@ -56,9 +55,7 @@ public class EmbargoCheck extends Check { } catch (SQLException e) { error(e); try { - if (null != context) { - context.abort(); - } + context.abort(); } catch (Exception e1) { error(e); } diff --git 
a/dspace-api/src/main/java/org/dspace/identifier/DOI.java b/dspace-api/src/main/java/org/dspace/identifier/DOI.java index 67668c3abe..e99472e45c 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/DOI.java +++ b/dspace-api/src/main/java/org/dspace/identifier/DOI.java @@ -34,8 +34,6 @@ public class DOI implements Identifier, ReloadableEntity { public static final String SCHEME = "doi:"; - public static final String RESOLVER = "http://dx.doi.org"; - @Id @Column(name = "doi_id") @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "doi_seq") diff --git a/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java index da9c3b718a..66e7b94a4b 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java @@ -543,7 +543,7 @@ public class DOIIdentifierProvider extends FilteredIdentifierProvider { if (DELETED.equals(doiRow.getStatus()) || TO_BE_DELETED.equals(doiRow.getStatus())) { throw new DOIIdentifierException("You tried to update the metadata" - + "of a DOI that is marked as DELETED.", + + " of a DOI that is marked as DELETED.", DOIIdentifierException.DOI_IS_DELETED); } @@ -1028,7 +1028,7 @@ public class DOIIdentifierProvider extends FilteredIdentifierProvider { Item item = (Item) dso; List metadata = itemService.getMetadata(item, MD_SCHEMA, DOI_ELEMENT, DOI_QUALIFIER, null); - String leftPart = DOI.RESOLVER + SLASH + getPrefix() + SLASH + getNamespaceSeparator(); + String leftPart = doiService.getResolver() + SLASH + getPrefix() + SLASH + getNamespaceSeparator(); for (MetadataValue id : metadata) { if (id.getValue().startsWith(leftPart)) { return doiService.DOIFromExternalFormat(id.getValue()); diff --git a/dspace-api/src/main/java/org/dspace/identifier/DOIServiceImpl.java b/dspace-api/src/main/java/org/dspace/identifier/DOIServiceImpl.java index 
aca933aab6..99643db33f 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/DOIServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/identifier/DOIServiceImpl.java @@ -17,11 +17,13 @@ import org.dspace.core.Context; import org.dspace.identifier.dao.DOIDAO; import org.dspace.identifier.doi.DOIIdentifierException; import org.dspace.identifier.service.DOIService; +import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; /** - * Service implementation for the DOI object. - * This class is responsible for all business logic calls for the DOI object and is autowired by spring. + * Service implementation for the {@link DOI} object. + * This class is responsible for all business logic calls for the DOI object + * and is autowired by Spring. * This class should never be accessed directly. * * @author kevinvandevelde at atmire.com @@ -31,6 +33,16 @@ public class DOIServiceImpl implements DOIService { @Autowired(required = true) protected DOIDAO doiDAO; + @Autowired(required = true) + protected ConfigurationService configurationService; + + private static final Pattern DOI_URL_PATTERN + = Pattern.compile("http(s)?://([a-z0-9-.]+)?doi.org(?/.*)", + Pattern.CASE_INSENSITIVE); + private static final String DOI_URL_PATTERN_PATH_GROUP = "path"; + + private static final String RESOLVER_DEFAULT = "https://doi.org"; + protected DOIServiceImpl() { } @@ -66,25 +78,46 @@ public class DOIServiceImpl implements DOIService { if (null == identifier) { throw new IllegalArgumentException("Identifier is null.", new NullPointerException()); } + if (identifier.isEmpty()) { throw new IllegalArgumentException("Cannot format an empty identifier."); } - if (identifier.startsWith(DOI.SCHEME)) { - return DOI.RESOLVER + "/" + identifier.substring(DOI.SCHEME.length()); + + String resolver = getResolver(); + + if (identifier.startsWith(DOI.SCHEME)) { // doi:something + StringBuilder result = new StringBuilder(resolver); + if 
(!resolver.endsWith("/")) { + result.append('/'); + } + result.append(identifier.substring(DOI.SCHEME.length())); + return result.toString(); } - if (identifier.startsWith("10.") && identifier.contains("/")) { - return DOI.RESOLVER + "/" + identifier; + + if (identifier.startsWith("10.") && identifier.contains("/")) { // 10.something + StringBuilder result = new StringBuilder(resolver); + if (!resolver.endsWith("/")) { + result.append('/'); + } + result.append(identifier); + return result.toString(); } - if (identifier.startsWith(DOI.RESOLVER + "/10.")) { + + if (identifier.startsWith(resolver + "/10.")) { // https://doi.org/10.something return identifier; } + Matcher matcher = DOI_URL_PATTERN.matcher(identifier); + if (matcher.matches()) { // various old URL forms + return resolver + matcher.group(DOI_URL_PATTERN_PATH_GROUP); + } + throw new IdentifierException(identifier + "does not seem to be a DOI."); } @Override public String DOIFromExternalFormat(String identifier) throws DOIIdentifierException { - Pattern pattern = Pattern.compile("^" + DOI.RESOLVER + "/+(10\\..*)$"); + Pattern pattern = Pattern.compile("^" + getResolver() + "/+(10\\..*)$"); Matcher matcher = pattern.matcher(identifier); if (matcher.find()) { return DOI.SCHEME + matcher.group(1); @@ -99,18 +132,29 @@ public class DOIServiceImpl implements DOIService { if (null == identifier) { throw new IllegalArgumentException("Identifier is null.", new NullPointerException()); } - if (identifier.startsWith(DOI.SCHEME)) { - return identifier; - } + if (identifier.isEmpty()) { throw new IllegalArgumentException("Cannot format an empty identifier."); } - if (identifier.startsWith("10.") && identifier.contains("/")) { + + if (identifier.startsWith(DOI.SCHEME)) { // doi:something + return identifier; + } + + if (identifier.startsWith("10.") && identifier.contains("/")) { // 10.something return DOI.SCHEME + identifier; } - if (identifier.startsWith(DOI.RESOLVER + "/10.")) { - return DOI.SCHEME + 
identifier.substring(18); + + String resolver = getResolver(); + if (identifier.startsWith(resolver + "/10.")) { //https://doi.org/10.something + return DOI.SCHEME + identifier.substring(resolver.length()); } + + Matcher matcher = DOI_URL_PATTERN.matcher(identifier); + if (matcher.matches()) { // various old URL forms + return DOI.SCHEME + matcher.group(DOI_URL_PATTERN_PATH_GROUP).substring(1); + } + throw new DOIIdentifierException(identifier + "does not seem to be a DOI.", DOIIdentifierException.UNRECOGNIZED); } @@ -126,4 +170,14 @@ public class DOIServiceImpl implements DOIService { throws SQLException { return doiDAO.findSimilarNotInState(context, doiPattern, statuses, dsoIsNotNull); } + + @Override + public String getResolver() { + String resolver = configurationService.getProperty("identifier.doi.resolver", + RESOLVER_DEFAULT); + if (resolver.endsWith("/")) { + resolver = resolver.substring(0, resolver.length() - 1); + } + return resolver; + } } diff --git a/dspace-api/src/main/java/org/dspace/identifier/DataCiteXMLCreator.java b/dspace-api/src/main/java/org/dspace/identifier/DataCiteXMLCreator.java index 0ea25ff3a4..ae2cd248d4 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/DataCiteXMLCreator.java +++ b/dspace-api/src/main/java/org/dspace/identifier/DataCiteXMLCreator.java @@ -23,8 +23,8 @@ import org.dspace.core.factory.CoreServiceFactory; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.services.ConfigurationService; import org.dspace.utils.DSpace; -import org.jdom.Element; -import org.jdom.output.XMLOutputter; +import org.jdom2.Element; +import org.jdom2.output.XMLOutputter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java index cc43bd21b5..a864b4be4b 100644 --- 
a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java @@ -261,7 +261,6 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { doiService.update(context, doi); return doi.getDoi(); } - assert (previousVersionDOI != null); String identifier = getBareDOI(previousVersionDOI); diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java index 61abbcb580..7705fd2b57 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java @@ -117,7 +117,7 @@ public class VersionedHandleIdentifierProviderWithCanonicalHandles extends Ident // check if we have a previous item if (previous != null) { try { - // If we have a reviewer he/she might not have the + // If we have a reviewer they might not have the // rights to edit the metadata of thes previous item. 
// Temporarly grant them: context.turnOffAuthorisationSystem(); diff --git a/dspace-api/src/main/java/org/dspace/identifier/doi/DataCiteConnector.java b/dspace-api/src/main/java/org/dspace/identifier/doi/DataCiteConnector.java index bc8ea90957..57136d6143 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/doi/DataCiteConnector.java +++ b/dspace-api/src/main/java/org/dspace/identifier/doi/DataCiteConnector.java @@ -45,13 +45,13 @@ import org.dspace.core.factory.CoreServiceFactory; import org.dspace.handle.service.HandleService; import org.dspace.identifier.DOI; import org.dspace.services.ConfigurationService; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.filter.ElementFilter; -import org.jdom.input.SAXBuilder; -import org.jdom.output.Format; -import org.jdom.output.XMLOutputter; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.filter.ElementFilter; +import org.jdom2.input.SAXBuilder; +import org.jdom2.output.Format; +import org.jdom2.output.XMLOutputter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; diff --git a/dspace-api/src/main/java/org/dspace/identifier/service/DOIService.java b/dspace-api/src/main/java/org/dspace/identifier/service/DOIService.java index 9af1fd8a0a..5bd68a9061 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/service/DOIService.java +++ b/dspace-api/src/main/java/org/dspace/identifier/service/DOIService.java @@ -17,26 +17,65 @@ import org.dspace.identifier.IdentifierException; import org.dspace.identifier.doi.DOIIdentifierException; /** - * Service interface class for the DOI object. - * The implementation of this class is responsible for all business logic calls for the DOI object and is autowired - * by spring + * Service interface class for the {@link DOI} object. 
+ * The implementation of this class is responsible for all business logic calls + * for the {@link DOI} object and is autowired by Spring. * * @author kevinvandevelde at atmire.com */ public interface DOIService { + /** + * Update a DOI in storage. + * + * @param context current DSpace session. + * @param doi the DOI to persist. + * @throws SQLException passed through. + */ public void update(Context context, DOI doi) throws SQLException; + /** + * Create a new DOI in storage. + * + * @param context current DSpace session. + * @return the new DOI. + * @throws SQLException passed through. + */ public DOI create(Context context) throws SQLException; + /** + * Find a specific DOI in storage. + * + * @param context current DSpace session. + * @param doi string representation of the DOI. + * @return the DOI object found. + * @throws SQLException passed through, can mean none found. + */ public DOI findByDoi(Context context, String doi) throws SQLException; + /** + * Find the DOI assigned to a given DSpace Object. + * + * @param context current DSpace session. + * @param dso The DSpace Object. + * @return the DSO's DOI. + * @throws SQLException passed through. + */ public DOI findDOIByDSpaceObject(Context context, DSpaceObject dso) throws SQLException; + /** + * Find the DOI assigned to a given DSpace Object, unless it has one of a + * given set of statuses. + * + * @param context current DSpace context. + * @param dso the DSpace Object. + * @param statusToExclude uninteresting statuses. + * @return the DSO's DOI. + * @throws SQLException passed through. + */ public DOI findDOIByDSpaceObject(Context context, DSpaceObject dso, List statusToExclude) throws SQLException; - /** * This method helps to convert a DOI into a URL. It takes DOIs in one of * the following formats and returns it as URL (f.e. @@ -49,12 +88,18 @@ public interface DOIService { * * @param identifier A DOI that should be returned in external form. 
* @return A String containing a URL to the official DOI resolver. - * @throws IllegalArgumentException If identifier is null or an empty String. - * @throws org.dspace.identifier.IdentifierException If identifier could not be recognized as valid DOI. + * @throws IllegalArgumentException If identifier is null or an empty String. + * @throws IdentifierException If identifier could not be recognized as valid DOI. */ public String DOIToExternalForm(String identifier) throws IdentifierException; + /** + * Convert an HTTP DOI URL (https://doi.org/10.something) to a "doi:" URI. + * @param identifier HTTP URL + * @return DOI URI + * @throws DOIIdentifierException if {@link identifier} is not recognizable. + */ public String DOIFromExternalFormat(String identifier) throws DOIIdentifierException; @@ -64,16 +109,24 @@ public interface DOIService { * @param identifier Identifier to format, following format are accepted: * f.e. 10.123/456, doi:10.123/456, http://dx.doi.org/10.123/456. * @return Given Identifier with DOI-Scheme, f.e. doi:10.123/456. - * @throws IllegalArgumentException If identifier is empty or null. - * @throws org.dspace.identifier.doi.DOIIdentifierException If DOI could not be recognized. + * @throws IllegalArgumentException If identifier is empty or null. + * @throws DOIIdentifierException If DOI could not be recognized. */ public String formatIdentifier(String identifier) throws DOIIdentifierException; + /** + * Find all DOIs that have one of a given set of statuses. + * @param context current DSpace session. + * @param statuses desired statuses. + * @return all DOIs having any of the given statuses. + * @throws SQLException passed through. + */ public List getDOIsByStatus(Context context, List statuses) throws SQLException; /** - * Find all DOIs that are similar to the specified pattern ant not in the specified states. + * Find all DOIs that are similar to the specified pattern and not in the + * specified states. 
* * @param context DSpace context * @param doiPattern The pattern, e.g. "10.5072/123.%" @@ -85,4 +138,11 @@ public interface DOIService { public List getSimilarDOIsNotInState(Context context, String doiPattern, List statuses, boolean dsoIsNotNull) throws SQLException; + + /** + * Get the URL stem of the DOI resolver, e.g. "https://doi.org/". + * + * @return URL to the DOI resolver. + */ + public String getResolver(); } diff --git a/dspace-api/src/main/java/org/dspace/identifier/service/IdentifierService.java b/dspace-api/src/main/java/org/dspace/identifier/service/IdentifierService.java index 74219fc71c..64eee1dfcf 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/service/IdentifierService.java +++ b/dspace-api/src/main/java/org/dspace/identifier/service/IdentifierService.java @@ -92,6 +92,9 @@ public interface IdentifierService { throws AuthorizeException, SQLException, IdentifierException; /** + * Used to register newly-minted identifiers. Each provider is responsible + * for creating the appropriate identifier. All providers are interrogated. + * * @param context The relevant DSpace Context. * @param dso DSpace object to be registered * @throws AuthorizeException if authorization error @@ -101,7 +104,7 @@ public interface IdentifierService { void register(Context context, DSpaceObject dso) throws AuthorizeException, SQLException, IdentifierException; /** - * Used to Register a specific Identifier (for example a Handle, hdl:1234.5/6) + * Used to Register a specific Identifier (for example a Handle, hdl:1234.5/6). * The provider is responsible for detecting and processing the appropriate * identifier. All Providers are interrogated. Multiple providers * can process the same identifier. 
diff --git a/dspace-api/src/main/java/org/dspace/iiif/IIIFApiQueryServiceImpl.java b/dspace-api/src/main/java/org/dspace/iiif/IIIFApiQueryServiceImpl.java index 04a08a7781..7c6336ed3c 100644 --- a/dspace-api/src/main/java/org/dspace/iiif/IIIFApiQueryServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/iiif/IIIFApiQueryServiceImpl.java @@ -23,8 +23,7 @@ import org.dspace.iiif.util.IIIFSharedUtils; /** - * Queries the configured IIIF server for image dimensions. Used for - * formats that cannot be easily read using ImageIO (jpeg 2000). + * Queries the configured IIIF image server via the Image API. * * @author Michael Spalti mspalti@willamette.edu */ diff --git a/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/CanvasDimensionCLI.java b/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/CanvasDimensionCLI.java index 50b934d110..c7feea4c56 100644 --- a/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/CanvasDimensionCLI.java +++ b/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/CanvasDimensionCLI.java @@ -8,6 +8,7 @@ package org.dspace.iiif.canvasdimension; import java.util.Arrays; +import java.util.Date; import java.util.UUID; import org.apache.commons.cli.CommandLine; @@ -48,6 +49,7 @@ public class CanvasDimensionCLI { public static void main(String[] argv) throws Exception { + Date startTime = new Date(); boolean iiifEnabled = configurationService.getBooleanProperty("iiif.enabled"); if (!iiifEnabled) { @@ -64,7 +66,8 @@ public class CanvasDimensionCLI { String identifier = null; String eperson = null; - Context context = new Context(); + Context context = new Context(Context.Mode.BATCH_EDIT); + IIIFCanvasDimensionService canvasProcessor = IIIFCanvasDimensionServiceFactory.getInstance() .getIiifCanvasDimensionService(); @@ -220,9 +223,15 @@ public class CanvasDimensionCLI { context.commit(); } + Date endTime = new Date(); + System.out.println("Started: " + startTime.getTime()); + System.out.println("Ended: " + endTime.getTime()); 
+ System.out.println( + "Elapsed time: " + ((endTime.getTime() - startTime.getTime()) / 1000) + " secs (" + (endTime + .getTime() - startTime.getTime()) + " msecs)"); + // Always print summary to standard out. System.out.println(processed + " IIIF items were processed."); - } } diff --git a/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/IIIFCanvasDimensionServiceImpl.java b/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/IIIFCanvasDimensionServiceImpl.java index ad36b65ab9..a8be8971c0 100644 --- a/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/IIIFCanvasDimensionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/IIIFCanvasDimensionServiceImpl.java @@ -119,6 +119,7 @@ public class IIIFCanvasDimensionServiceImpl implements IIIFCanvasDimensionServic if (processItemBundles(context, item)) { ++processed; } + context.uncacheEntity(item); } } } @@ -137,6 +138,7 @@ public class IIIFCanvasDimensionServiceImpl implements IIIFCanvasDimensionServic List bitstreams = bundle.getBitstreams(); for (Bitstream bit : bitstreams) { done |= processBitstream(context, bit); + context.uncacheEntity(bit); } } if (done) { diff --git a/dspace-api/src/main/java/org/dspace/iiif/consumer/CanvasCacheEvictService.java b/dspace-api/src/main/java/org/dspace/iiif/consumer/CanvasCacheEvictService.java index 56cd432d91..beeb40ceac 100644 --- a/dspace-api/src/main/java/org/dspace/iiif/consumer/CanvasCacheEvictService.java +++ b/dspace-api/src/main/java/org/dspace/iiif/consumer/CanvasCacheEvictService.java @@ -23,7 +23,7 @@ public class CanvasCacheEvictService { CacheManager cacheManager; public void evictSingleCacheValue(String cacheKey) { - Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).evict(cacheKey); + Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).evictIfPresent(cacheKey); } } diff --git a/dspace-api/src/main/java/org/dspace/iiif/consumer/ManifestsCacheEvictService.java 
b/dspace-api/src/main/java/org/dspace/iiif/consumer/ManifestsCacheEvictService.java index 967d0667a6..963ce3113f 100644 --- a/dspace-api/src/main/java/org/dspace/iiif/consumer/ManifestsCacheEvictService.java +++ b/dspace-api/src/main/java/org/dspace/iiif/consumer/ManifestsCacheEvictService.java @@ -26,11 +26,11 @@ public class ManifestsCacheEvictService { CacheManager cacheManager; public void evictSingleCacheValue(String cacheKey) { - Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).evict(cacheKey); + Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).evictIfPresent(cacheKey); } public void evictAllCacheValues() { - Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).clear(); + Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).invalidate(); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/cache/CacheLogger.java b/dspace-api/src/main/java/org/dspace/iiif/logger/CacheLogger.java similarity index 95% rename from dspace-server-webapp/src/main/java/org/dspace/app/rest/cache/CacheLogger.java rename to dspace-api/src/main/java/org/dspace/iiif/logger/CacheLogger.java index bd77c578e6..28d57975bf 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/cache/CacheLogger.java +++ b/dspace-api/src/main/java/org/dspace/iiif/logger/CacheLogger.java @@ -6,7 +6,7 @@ * http://www.dspace.org/license/ */ -package org.dspace.app.rest.cache; +package org.dspace.iiif.logger; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/cache/CanvasCacheLogger.java b/dspace-api/src/main/java/org/dspace/iiif/logger/CanvasCacheLogger.java similarity index 95% rename from dspace-server-webapp/src/main/java/org/dspace/app/rest/cache/CanvasCacheLogger.java rename to dspace-api/src/main/java/org/dspace/iiif/logger/CanvasCacheLogger.java index eaa08000ee..2f1a8d6dba 100644 --- 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/cache/CanvasCacheLogger.java +++ b/dspace-api/src/main/java/org/dspace/iiif/logger/CanvasCacheLogger.java @@ -5,7 +5,7 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.cache; +package org.dspace.iiif.logger; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSFieldMapping.java new file mode 100644 index 0000000000..e7d2d3398b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSFieldMapping.java @@ -0,0 +1,39 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.ads; + +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the ADS metadatum fields on the DSpace metadatum fields + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +@SuppressWarnings("rawtypes") +public class ADSFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. 
+ */ + @Override + @SuppressWarnings("unchecked") + @Resource(name = "adsMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSImportMetadataSourceServiceImpl.java new file mode 100644 index 0000000000..8fbe4ef2cf --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSImportMetadataSourceServiceImpl.java @@ -0,0 +1,334 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.ads; + +import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.HEADER_PARAMETERS; + +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import javax.el.MethodNotFoundException; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.client.utils.URIBuilder; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import 
org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.components.QuerySource; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying ADS + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class ADSImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private final static Logger log = LogManager.getLogger(); + + private String url; + private String resultFieldList; + + private String apiKey; + private int timeout = 1000; + + @Autowired + private LiveImportClient liveImportClient; + + @Override + public String getImportSource() { + return "ads"; + } + + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + List records = retry(new SearchByIdCallable(id)); + return CollectionUtils.isEmpty(records) ? null : records.get(0); + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query, count, start)); + } + + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query)); + } + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + List records = retry(new SearchByIdCallable(query)); + return CollectionUtils.isEmpty(records) ? 
null : records.get(0); + } + + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + return retry(new FindMatchingRecordCallable(query)); + } + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for CrossRef"); + } + + @Override + public void init() throws Exception {} + + public String getApiKey() { + return apiKey; + } + + public void setApiKey(String apiKey) { + this.apiKey = apiKey; + } + + /** + * This class is a Callable implementation to get ADS entries based on query object. + * This Callable use as query value the string queryString passed to constructor. + * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByQueryCallable implements Callable> { + + private Query query; + + private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("count", maxResult); + query.addParameter("start", start); + } + + private SearchByQueryCallable(Query query) { + this.query = query; + } + + @Override + public List call() throws Exception { + return search(query.getParameterAsClass("query", String.class), + query.getParameterAsClass("start", Integer.class), + query.getParameterAsClass("count", Integer.class), + getApiKey()); + } + } + + /** + * This class is a Callable implementation to get an ADS entry using bibcode + * The bibcode to use can be passed through the constructor as a String or as Query's map entry, with the key "id". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByIdCallable implements Callable> { + private Query query; + + private SearchByIdCallable(Query query) { + this.query = query; + } + + private SearchByIdCallable(String id) { + this.query = new Query(); + query.addParameter("id", id); + } + + @Override + public List call() throws Exception { + String queryString = "bibcode:" + query.getParameterAsClass("id", String.class); + return search(queryString, 0 , 1, getApiKey()); + } + } + + /** + * This class is a Callable implementation to search ADS entries + * using author and title and year. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class FindMatchingRecordCallable implements Callable> { + + private Query query; + + private FindMatchingRecordCallable(Query q) { + query = q; + } + + @Override + public List call() throws Exception { + Integer count = query.getParameterAsClass("count", Integer.class); + Integer start = query.getParameterAsClass("start", Integer.class); + String author = query.getParameterAsClass("author", String.class); + String title = query.getParameterAsClass("title", String.class); + Integer year = query.getParameterAsClass("year", Integer.class); + return search(title, author, year, start, count, getApiKey()); + } + + } + + /** + * This class is a Callable implementation to count the number of entries for an ADS query. + * This Callable use as query value to ADS the string queryString passed to constructor. + * If the object will be construct through Query.class instance, the value of the Query's + * map with the key "query" will be used. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class CountByQueryCallable implements Callable { + private Query query; + + + private CountByQueryCallable(String queryString) { + query = new Query(); + query.addParameter("query", queryString); + } + + private CountByQueryCallable(Query query) { + this.query = query; + } + + @Override + public Integer call() throws Exception { + return count(query.getParameterAsClass("query", String.class), getApiKey()); + } + } + + private List search(String title, String author, int year, int start, int count, String token) { + String query = ""; + if (StringUtils.isNotBlank(title)) { + query += "title:" + title; + } + if (StringUtils.isNotBlank(author)) { + String splitRegex = "(\\s*,\\s+|\\s*;\\s+|\\s*;+|\\s*,+|\\s+)"; + String[] authors = author.split(splitRegex); + // [FAU] + if (StringUtils.isNotBlank(query)) { + query = "author:"; + } else { + query += "&fq=author:"; + } + int x = 0; + for (String auth : authors) { + x++; + query += auth; + if (x < authors.length) { + query += " AND "; + } + } + } + if (year != -1) { + // [DP] + if (StringUtils.isNotBlank(query)) { + query = "year:"; + } else { + query += "&fq=year:"; + } + query += year; + } + return search(query.toString(), start, count, token); + } + + public Integer count(String query, String token) { + try { + Map> params = new HashMap>(); + Map headerParameters = new HashMap(); + headerParameters.put("Authorization", "Bearer " + token); + params.put(HEADER_PARAMETERS, headerParameters); + + URIBuilder uriBuilder = new URIBuilder(this.url); + uriBuilder.addParameter("q", query); + uriBuilder.addParameter("rows", "1"); + uriBuilder.addParameter("start", "0"); + uriBuilder.addParameter("fl", this.resultFieldList); + + String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); + JsonNode jsonNode = convertStringJsonToJsonNode(resp); + return jsonNode.at("/response/numFound").asInt(); + } catch 
(URISyntaxException e) { + e.printStackTrace(); + } + return 0; + } + + public List search(String query, Integer start, Integer count, String token) { + List adsResults = new ArrayList<>(); + try { + Map> params = new HashMap>(); + Map headerParameters = new HashMap(); + headerParameters.put("Authorization", "Bearer " + token); + params.put(HEADER_PARAMETERS, headerParameters); + + URIBuilder uriBuilder = new URIBuilder(this.url); + uriBuilder.addParameter("q", query); + uriBuilder.addParameter("rows", count.toString()); + uriBuilder.addParameter("start", start.toString()); + uriBuilder.addParameter("fl", this.resultFieldList); + + String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); + + JsonNode jsonNode = convertStringJsonToJsonNode(resp); + JsonNode docs = jsonNode.at("/response/docs"); + if (docs.isArray()) { + Iterator nodes = docs.elements(); + while (nodes.hasNext()) { + JsonNode node = nodes.next(); + adsResults.add(transformSourceRecords(node.toString())); + } + } else { + adsResults.add(transformSourceRecords(docs.toString())); + } + } catch (URISyntaxException e) { + e.printStackTrace(); + } + return adsResults; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + try { + return new ObjectMapper().readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return null; + } + + public void setUrl(String url) { + this.url = url; + } + + public void setResultFieldList(String resultFieldList) { + this.resultFieldList = resultFieldList; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/arxiv/metadatamapping/contributor/ArXivIdMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/arxiv/metadatamapping/contributor/ArXivIdMetadataContributor.java index ed5ac5960b..7bd42cf07a 100644 --- 
a/dspace-api/src/main/java/org/dspace/importer/external/arxiv/metadatamapping/contributor/ArXivIdMetadataContributor.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/arxiv/metadatamapping/contributor/ArXivIdMetadataContributor.java @@ -9,10 +9,10 @@ package org.dspace.importer.external.arxiv.metadatamapping.contributor; import java.util.Collection; -import org.apache.axiom.om.OMElement; import org.dspace.importer.external.metadatamapping.MetadatumDTO; import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor; import org.dspace.importer.external.metadatamapping.contributor.SimpleXpathMetadatumContributor; +import org.jdom2.Element; /** * Arxiv specific implementation of {@link MetadataContributor} @@ -32,7 +32,7 @@ public class ArXivIdMetadataContributor extends SimpleXpathMetadatumContributor * @return a collection of import records. Only the identifier of the found records may be put in the record. */ @Override - public Collection contributeMetadata(OMElement t) { + public Collection contributeMetadata(Element t) { Collection values = super.contributeMetadata(t); parseValue(values); return values; diff --git a/dspace-api/src/main/java/org/dspace/importer/external/arxiv/service/ArXivImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/arxiv/service/ArXivImportMetadataSourceServiceImpl.java index 6b418423fa..96689e62ba 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/arxiv/service/ArXivImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/arxiv/service/ArXivImportMetadataSourceServiceImpl.java @@ -7,8 +7,10 @@ */ package org.dspace.importer.external.arxiv.service; +import java.io.IOException; import java.io.StringReader; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.concurrent.Callable; @@ -20,10 +22,6 @@ import javax.ws.rs.client.WebTarget; import 
javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; -import org.apache.axiom.om.OMElement; -import org.apache.axiom.om.OMXMLBuilderFactory; -import org.apache.axiom.om.OMXMLParserWrapper; -import org.apache.axiom.om.xpath.AXIOMXPath; import org.apache.commons.lang3.StringUtils; import org.dspace.content.Item; import org.dspace.importer.external.datamodel.ImportRecord; @@ -31,7 +29,14 @@ import org.dspace.importer.external.datamodel.Query; import org.dspace.importer.external.exception.MetadataSourceException; import org.dspace.importer.external.service.AbstractImportMetadataSourceService; import org.dspace.importer.external.service.components.QuerySource; -import org.jaxen.JaxenException; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.input.SAXBuilder; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; /** * Implements a data source for querying ArXiv @@ -39,7 +44,7 @@ import org.jaxen.JaxenException; * @author Pasquale Cavallo (pasquale.cavallo at 4Science dot it) * */ -public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService +public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService implements QuerySource { private WebTarget webTarget; @@ -213,15 +218,20 @@ public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadata Response response = invocationBuilder.get(); if (response.getStatus() == 200) { String responseString = response.readEntity(String.class); - OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(responseString)); - OMElement element = records.getDocumentElement(); - AXIOMXPath xpath = null; + + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(responseString)); + Element root = document.getRootElement(); + + List namespaces = 
Arrays.asList(Namespace.getNamespace("opensearch", + "http://a9.com/-/spec/opensearch/1.1/")); + XPathExpression xpath = + XPathFactory.instance().compile("opensearch:totalResults", Filters.element(), null, namespaces); + + Element count = xpath.evaluateFirst(root); try { - xpath = new AXIOMXPath("opensearch:totalResults"); - xpath.addNamespace("opensearch", "http://a9.com/-/spec/opensearch/1.1/"); - OMElement count = (OMElement) xpath.selectSingleNode(element); return Integer.parseInt(count.getText()); - } catch (JaxenException e) { + } catch (NumberFormatException e) { return null; } } else { @@ -274,8 +284,8 @@ public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadata Response response = invocationBuilder.get(); if (response.getStatus() == 200) { String responseString = response.readEntity(String.class); - List omElements = splitToRecords(responseString); - for (OMElement record : omElements) { + List elements = splitToRecords(responseString); + for (Element record : elements) { results.add(transformSourceRecords(record)); } return results; @@ -321,8 +331,8 @@ public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadata Response response = invocationBuilder.get(); if (response.getStatus() == 200) { String responseString = response.readEntity(String.class); - List omElements = splitToRecords(responseString); - for (OMElement record : omElements) { + List elements = splitToRecords(responseString); + for (Element record : elements) { results.add(transformSourceRecords(record)); } return results; @@ -359,8 +369,8 @@ public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadata Response response = invocationBuilder.get(); if (response.getStatus() == 200) { String responseString = response.readEntity(String.class); - List omElements = splitToRecords(responseString); - for (OMElement record : omElements) { + List elements = splitToRecords(responseString); + for (Element record : elements) { 
results.add(transformSourceRecords(record)); } return results; @@ -387,16 +397,21 @@ public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadata } } - private List splitToRecords(String recordsSrc) { - OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(recordsSrc)); - OMElement element = records.getDocumentElement(); - AXIOMXPath xpath = null; + private List splitToRecords(String recordsSrc) { + try { - xpath = new AXIOMXPath("ns:entry"); - xpath.addNamespace("ns", "http://www.w3.org/2005/Atom"); - List recordsList = xpath.selectNodes(element); + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(recordsSrc)); + Element root = document.getRootElement(); + + List namespaces = Arrays.asList(Namespace.getNamespace("ns", + "http://www.w3.org/2005/Atom")); + XPathExpression xpath = + XPathFactory.instance().compile("ns:entry", Filters.element(), null, namespaces); + + List recordsList = xpath.evaluate(root); return recordsList; - } catch (JaxenException e) { + } catch (JDOMException | IOException e) { return null; } } diff --git a/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java index 7468d601f5..0014088c86 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java @@ -70,11 +70,24 @@ public class BibtexImportMetadataSourceServiceImpl extends AbstractPlainMetadata keyValueItem.setKey(entry.getValue().getType().getValue()); keyValueItem.setValue(entry.getKey().getValue()); keyValues.add(keyValueItem); + PlainMetadataKeyValueItem typeItem = new PlainMetadataKeyValueItem(); + typeItem.setKey("type"); + 
typeItem.setValue(entry.getValue().getType().getValue()); + keyValues.add(typeItem); if (entry.getValue().getFields() != null) { for (Entry subentry : entry.getValue().getFields().entrySet()) { PlainMetadataKeyValueItem innerItem = new PlainMetadataKeyValueItem(); - innerItem.setKey(subentry.getKey().getValue()); - innerItem.setValue(subentry.getValue().toUserString()); + innerItem.setKey(subentry.getKey().getValue().toLowerCase()); + String latexString = subentry.getValue().toUserString(); + try { + org.jbibtex.LaTeXParser laTeXParser = new org.jbibtex.LaTeXParser(); + List latexObjects = laTeXParser.parse(latexString); + org.jbibtex.LaTeXPrinter laTeXPrinter = new org.jbibtex.LaTeXPrinter(); + String plainTextString = laTeXPrinter.print(latexObjects); + innerItem.setValue(plainTextString.replaceAll("\n", " ")); + } catch (ParseException e) { + innerItem.setValue(latexString); + } keyValues.add(innerItem); } } @@ -92,10 +105,10 @@ public class BibtexImportMetadataSourceServiceImpl extends AbstractPlainMetadata /** - * Retrieve the MetadataFieldMapping containing the mapping between RecordType + * Set the MetadataFieldMapping containing the mapping between RecordType * (in this case PlainMetadataSourceDto.class) and Metadata * - * @return The configured MetadataFieldMapping + * @param metadataFieldMap The configured MetadataFieldMapping */ @Override @SuppressWarnings("unchecked") diff --git a/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiFieldMapping.java new file mode 100644 index 0000000000..f266ff3d85 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiFieldMapping.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package 
org.dspace.importer.external.cinii; + +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the Cinii metadatum fields on the DSpace metadatum fields + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +public class CiniiFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. + */ + @Override + @Resource(name = "ciniiMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiImportMetadataSourceServiceImpl.java new file mode 100644 index 0000000000..5eff46c790 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiImportMetadataSourceServiceImpl.java @@ -0,0 +1,447 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.cinii; + +import java.io.IOException; +import java.io.StringReader; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.LinkedList; +import 
java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.Callable; +import javax.el.MethodNotFoundException; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.http.HttpException; +import org.apache.http.client.utils.URIBuilder; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.components.QuerySource; +import org.dspace.services.ConfigurationService; +import org.jdom2.Attribute; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.input.SAXBuilder; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying Cinii + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class CiniiImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private final static Logger log = LogManager.getLogger(); + + private String url; + private String urlSearch; + + @Autowired + private LiveImportClient liveImportClient; + + @Autowired + private ConfigurationService configurationService; + + @Override + public String getImportSource() { + return "cinii"; + } + + @Override + public void init() throws Exception {} + + @Override + public 
ImportRecord getRecord(String id) throws MetadataSourceException { + List records = retry(new SearchByIdCallable(id)); + return CollectionUtils.isNotEmpty(records) ? records.get(0) : null; + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query, count, start)); + } + + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query)); + } + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + List records = retry(new SearchByIdCallable(query)); + return CollectionUtils.isNotEmpty(records) ? records.get(0) : null; + } + + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + return retry(new FindMatchingRecordCallable(query)); + } + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for Cinii"); + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public String getUrlSearch() { + return urlSearch; + } + + public void setUrlSearch(String urlSearch) { + this.urlSearch = urlSearch; + } + + /** + * This class is a Callable implementation to get CiNii entries based on + * query object. + * + * This Callable use as query value the string queryString passed to constructor. + * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used. 
+ * Pagination is supported too, using the value of the Query's map with keys "start" and "count". + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class SearchByQueryCallable implements Callable> { + + private Query query; + + private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("count", maxResult); + query.addParameter("start", start); + } + + private SearchByQueryCallable(Query query) { + this.query = query; + } + + @Override + public List call() throws Exception { + List records = new LinkedList(); + Integer count = query.getParameterAsClass("count", Integer.class); + Integer start = query.getParameterAsClass("start", Integer.class); + String queryString = query.getParameterAsClass("query", String.class); + String appId = configurationService.getProperty("cinii.appid"); + List ids = getCiniiIds(appId, count, null, null, null, start, queryString); + if (CollectionUtils.isNotEmpty(ids)) { + for (String id : ids) { + List tmp = search(id, appId); + if (CollectionUtils.isNotEmpty(tmp)) { + tmp.forEach(x -> x.addValue(createIdentifier(id))); + } + records.addAll(tmp); + } + } + return records; + } + } + + /** + * This class is a Callable implementation to get an CiNii entry using CiNii ID + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class SearchByIdCallable implements Callable> { + private Query query; + + private SearchByIdCallable(Query query) { + this.query = query; + } + + private SearchByIdCallable(String id) { + this.query = new Query(); + query.addParameter("id", id); + } + + @Override + public List call() throws Exception { + String appId = configurationService.getProperty("cinii.appid"); + String id = query.getParameterAsClass("id", String.class); + List importRecord = search(id, appId); + if (CollectionUtils.isNotEmpty(importRecord)) { + importRecord.forEach(x -> 
x.addValue(createIdentifier(id))); + } + return importRecord; + } + } + + /** + * This class is a Callable implementation to search CiNii entries + * using author, title and year. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class FindMatchingRecordCallable implements Callable> { + + private Query query; + + private FindMatchingRecordCallable(Query q) { + query = q; + } + + @Override + public List call() throws Exception { + List records = new LinkedList(); + String title = query.getParameterAsClass("title", String.class); + String author = query.getParameterAsClass("author", String.class); + Integer year = query.getParameterAsClass("year", Integer.class); + Integer maxResult = query.getParameterAsClass("maxResult", Integer.class); + Integer start = query.getParameterAsClass("start", Integer.class); + String appId = configurationService.getProperty("cinii.appid"); + List ids = getCiniiIds(appId, maxResult, author, title, year, start, null); + if (CollectionUtils.isNotEmpty(ids)) { + for (String id : ids) { + List importRecords = search(id, appId); + if (CollectionUtils.isNotEmpty(importRecords)) { + importRecords.forEach(x -> x.addValue(createIdentifier(id))); + } + records.addAll(importRecords); + } + } + return records; + } + + } + + /** + * This class is a Callable implementation to count the number + * of entries for an CiNii query. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class CountByQueryCallable implements Callable { + private Query query; + + + private CountByQueryCallable(String queryString) { + query = new Query(); + query.addParameter("query", queryString); + } + + private CountByQueryCallable(Query query) { + this.query = query; + } + + @Override + public Integer call() throws Exception { + String appId = configurationService.getProperty("cinii.appid"); + String queryString = query.getParameterAsClass("query", String.class); + return countCiniiElement(appId, null, null, null, null, null, queryString); + } + } + + /** + * Get metadata by searching CiNii RDF API with CiNii NAID + * + * @param id CiNii NAID to search by + * @param appId registered application identifier for the API + * @return record metadata + * @throws IOException A general class of exceptions produced by failed or interrupted I/O operations. + * @throws HttpException Represents a XML/HTTP fault and provides access to the HTTP status code. 
+ */ + protected List search(String id, String appId) + throws IOException, HttpException { + try { + List records = new LinkedList(); + URIBuilder uriBuilder = new URIBuilder(this.url + id + ".rdf?appid=" + appId); + Map> params = new HashMap>(); + String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + List elements = splitToRecords(response); + for (Element record : elements) { + records.add(transformSourceRecords(record)); + } + return records; + } catch (URISyntaxException e) { + log.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); + } + } + + private List splitToRecords(String recordsSrc) { + try { + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(recordsSrc)); + Element root = document.getRootElement(); + return root.getChildren(); + } catch (JDOMException | IOException e) { + log.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); + } + } + + /** + * Returns a list of uri links (for example:https://cir.nii.ac.jp/crid/123456789) + * to the searched CiNii articles + * + * @param appId Application ID + * @param maxResult The number of search results per page + * @param author Author name + * @param title Article name + * @param year Year of publication + * @param start Start number for the acquired search result list + * @param query Keyword to be searched + */ + private List getCiniiIds(String appId, Integer maxResult, String author, String title, + Integer year, Integer start, String query) { + try { + List ids = new ArrayList<>(); + URIBuilder uriBuilder = new URIBuilder(this.urlSearch); + uriBuilder.addParameter("format", "rss"); + if (StringUtils.isNotBlank(appId)) { + uriBuilder.addParameter("appid", appId); + } + if (Objects.nonNull(maxResult) && maxResult != 0) { + uriBuilder.addParameter("count", maxResult.toString()); + } + if (Objects.nonNull(start)) { + uriBuilder.addParameter("start", start.toString()); + } + 
if (StringUtils.isNotBlank(title)) { + uriBuilder.addParameter("title", title); + } + if (StringUtils.isNotBlank(author)) { + uriBuilder.addParameter("author", author); + } + if (StringUtils.isNotBlank(query)) { + uriBuilder.addParameter("q", query); + } + if (Objects.nonNull(year) && year != -1 && year != 0) { + uriBuilder.addParameter("year_from", String.valueOf(year)); + uriBuilder.addParameter("year_to", String.valueOf(year)); + } + Map> params = new HashMap>(); + String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + int url_len = this.url.length() - 1; + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(response)); + Element root = document.getRootElement(); + List namespaces = Arrays.asList( + Namespace.getNamespace("ns", "http://purl.org/rss/1.0/"), + Namespace.getNamespace("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#")); + XPathExpression xpath = XPathFactory.instance().compile("//ns:item/@rdf:about", + Filters.attribute(), null, namespaces); + List recordsList = xpath.evaluate(root); + for (Attribute item : recordsList) { + String value = item.getValue(); + if (value.length() > url_len) { + ids.add(value.substring(url_len + 1)); + } + } + return ids; + } catch (JDOMException | IOException | URISyntaxException e) { + log.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); + } + } + + /** + * Returns the total number of CiNii articles returned by a specific query + * + * @param appId Application ID + * @param maxResult The number of search results per page + * @param author Author name + * @param title Article name + * @param year Year of publication + * @param start Start number for the acquired search result list + * @param query Keyword to be searched + */ + private Integer countCiniiElement(String appId, Integer maxResult, String author, String title, + Integer year, Integer start, String query) { + try { + URIBuilder uriBuilder = new 
URIBuilder(this.urlSearch); + uriBuilder.addParameter("format", "rss"); + uriBuilder.addParameter("appid", appId); + if (Objects.nonNull(maxResult) && maxResult != 0) { + uriBuilder.addParameter("count", maxResult.toString()); + } + if (Objects.nonNull(start)) { + uriBuilder.addParameter("start", start.toString()); + } + if (StringUtils.isNotBlank(title)) { + uriBuilder.addParameter("title", title); + } + if (StringUtils.isNotBlank(author)) { + uriBuilder.addParameter("author", author); + } + if (StringUtils.isNotBlank(query)) { + uriBuilder.addParameter("q", query); + } + if (Objects.nonNull(year) && year != -1 && year != 0) { + uriBuilder.addParameter("year_from", String.valueOf(year)); + uriBuilder.addParameter("year_to", String.valueOf(year)); + } + + Map> params = new HashMap>(); + String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(response)); + Element root = document.getRootElement(); + List namespaces = Arrays + .asList(Namespace.getNamespace("opensearch", "http://a9.com/-/spec/opensearch/1.1/")); + XPathExpression xpath = XPathFactory.instance().compile("//opensearch:totalResults", + Filters.element(), null, namespaces); + List nodes = xpath.evaluate(root); + if (nodes != null && !nodes.isEmpty()) { + return Integer.parseInt(((Element) nodes.get(0)).getText()); + } + return 0; + } catch (JDOMException | IOException | URISyntaxException e) { + log.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); + } + } + + private MetadatumDTO createIdentifier(String id) { + MetadatumDTO metadatumDTO = new MetadatumDTO(); + metadatumDTO.setSchema("dc"); + metadatumDTO.setElement("identifier"); + metadatumDTO.setQualifier("other"); + metadatumDTO.setValue(id); + return metadatumDTO; + } + +} \ No newline at end of file diff --git 
a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefAuthorMetadataProcessor.java b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefAuthorMetadataProcessor.java new file mode 100644 index 0000000000..abf84f52d0 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefAuthorMetadataProcessor.java @@ -0,0 +1,67 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.crossref; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.contributor.JsonPathMetadataProcessor; + +/** + * This class is used for CrossRef's Live-Import to extract + * attributes such as "given" and "family" from the array of authors/editors + * and return them concatenated. + * Beans are configured in the crossref-integration.xml file. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class CrossRefAuthorMetadataProcessor implements JsonPathMetadataProcessor { + + private final static Logger log = LogManager.getLogger(); + + private String pathToArray; + + @Override + public Collection processMetadata(String json) { + JsonNode rootNode = convertStringJsonToJsonNode(json); + Iterator authors = rootNode.at(pathToArray).iterator(); + Collection values = new ArrayList<>(); + while (authors.hasNext()) { + JsonNode author = authors.next(); + String givenName = author.at("/given").textValue(); + String familyName = author.at("/family").textValue(); + if (StringUtils.isNoneBlank(givenName) && StringUtils.isNoneBlank(familyName)) { + values.add(givenName + " " + familyName); + } + } + return values; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return body; + } + + public void setPathToArray(String pathToArray) { + this.pathToArray = pathToArray; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefFieldMapping.java new file mode 100644 index 0000000000..5e879b4d26 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefFieldMapping.java @@ -0,0 +1,39 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.crossref; + +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; 
+ +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the CrossRef metadatum fields on the DSpace metadatum fields + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +@SuppressWarnings("rawtypes") +public class CrossRefFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. + */ + @Override + @SuppressWarnings("unchecked") + @Resource(name = "crossrefMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefImportMetadataSourceServiceImpl.java new file mode 100644 index 0000000000..7dde330b27 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefImportMetadataSourceServiceImpl.java @@ -0,0 +1,336 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.crossref; + +import java.net.URLDecoder; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.Callable; +import javax.el.MethodNotFoundException; + +import 
com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.client.utils.URIBuilder; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.DoiCheck; +import org.dspace.importer.external.service.components.QuerySource; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying CrossRef + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class CrossRefImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private final static Logger log = LogManager.getLogger(); + + private String url; + + @Autowired + private LiveImportClient liveImportClient; + + @Override + public String getImportSource() { + return "crossref"; + } + + @Override + public void init() throws Exception {} + + @Override + public ImportRecord getRecord(String recordId) throws MetadataSourceException { + String id = getID(recordId); + List records = StringUtils.isNotBlank(id) ? retry(new SearchByIdCallable(id)) + : retry(new SearchByIdCallable(recordId)); + return CollectionUtils.isEmpty(records) ? null : records.get(0); + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + String id = getID(query); + return StringUtils.isNotBlank(id) ? 
retry(new DoiCheckCallable(id)) : retry(new CountByQueryCallable(query)); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + String id = getID(query.toString()); + return StringUtils.isNotBlank(id) ? retry(new DoiCheckCallable(id)) : retry(new CountByQueryCallable(query)); + } + + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + String id = getID(query.toString()); + return StringUtils.isNotBlank(id) ? retry(new SearchByIdCallable(id)) + : retry(new SearchByQueryCallable(query, count, start)); + } + + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + String id = getID(query.toString()); + if (StringUtils.isNotBlank(id)) { + return retry(new SearchByIdCallable(id)); + } + return retry(new SearchByQueryCallable(query)); + } + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + String id = getID(query.toString()); + List records = StringUtils.isNotBlank(id) ? retry(new SearchByIdCallable(id)) + : retry(new SearchByIdCallable(query)); + return CollectionUtils.isEmpty(records) ? null : records.get(0); + } + + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + String id = getID(query.toString()); + return StringUtils.isNotBlank(id) ? retry(new SearchByIdCallable(id)) + : retry(new FindMatchingRecordCallable(query)); + } + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for CrossRef"); + } + + public String getID(String id) { + return DoiCheck.isDoi(id) ? "filter=doi:" + id : StringUtils.EMPTY; + } + + /** + * This class is a Callable implementation to get CrossRef entries based on query object. + * This Callable use as query value the string queryString passed to constructor. 
+ * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByQueryCallable implements Callable> { + + private Query query; + + private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("count", maxResult); + query.addParameter("start", start); + } + + private SearchByQueryCallable(Query query) { + this.query = query; + } + + @Override + public List call() throws Exception { + List results = new ArrayList<>(); + Integer count = query.getParameterAsClass("count", Integer.class); + Integer start = query.getParameterAsClass("start", Integer.class); + + URIBuilder uriBuilder = new URIBuilder(url); + uriBuilder.addParameter("query", query.getParameterAsClass("query", String.class)); + if (Objects.nonNull(count)) { + uriBuilder.addParameter("rows", count.toString()); + } + if (Objects.nonNull(start)) { + uriBuilder.addParameter("offset", start.toString()); + } + Map> params = new HashMap>(); + String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + JsonNode jsonNode = convertStringJsonToJsonNode(response); + Iterator nodes = jsonNode.at("/message/items").iterator(); + while (nodes.hasNext()) { + JsonNode node = nodes.next(); + results.add(transformSourceRecords(node.toString())); + } + return results; + } + + } + + /** + * This class is a Callable implementation to get an CrossRef entry using DOI + * The DOI to use can be passed through the constructor as a String or as Query's map entry, with the key "id". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByIdCallable implements Callable> { + private Query query; + + private SearchByIdCallable(Query query) { + this.query = query; + } + + private SearchByIdCallable(String id) { + this.query = new Query(); + query.addParameter("id", id); + } + + @Override + public List call() throws Exception { + List results = new ArrayList<>(); + String ID = URLDecoder.decode(query.getParameterAsClass("id", String.class), "UTF-8"); + URIBuilder uriBuilder = new URIBuilder(url + "/" + ID); + Map> params = new HashMap>(); + String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + JsonNode jsonNode = convertStringJsonToJsonNode(responseString); + JsonNode messageNode = jsonNode.at("/message"); + results.add(transformSourceRecords(messageNode.toString())); + return results; + } + } + + /** + * This class is a Callable implementation to search CrossRef entries using author and title. + * There are two field in the Query map to pass, with keys "title" and "author" + * (at least one must be used). 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class FindMatchingRecordCallable implements Callable> { + + private Query query; + + private FindMatchingRecordCallable(Query q) { + query = q; + } + + @Override + public List call() throws Exception { + String queryValue = query.getParameterAsClass("query", String.class); + Integer count = query.getParameterAsClass("count", Integer.class); + Integer start = query.getParameterAsClass("start", Integer.class); + String author = query.getParameterAsClass("author", String.class); + String title = query.getParameterAsClass("title", String.class); + String bibliographics = query.getParameterAsClass("bibliographics", String.class); + List results = new ArrayList<>(); + URIBuilder uriBuilder = new URIBuilder(url); + if (Objects.nonNull(queryValue)) { + uriBuilder.addParameter("query", queryValue); + } + if (Objects.nonNull(count)) { + uriBuilder.addParameter("rows", count.toString()); + } + if (Objects.nonNull(start)) { + uriBuilder.addParameter("offset", start.toString()); + } + if (Objects.nonNull(author)) { + uriBuilder.addParameter("query.author", author); + } + if (Objects.nonNull(title )) { + uriBuilder.addParameter("query.container-title", title); + } + if (Objects.nonNull(bibliographics)) { + uriBuilder.addParameter("query.bibliographic", bibliographics); + } + Map> params = new HashMap>(); + String resp = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + JsonNode jsonNode = convertStringJsonToJsonNode(resp); + Iterator nodes = jsonNode.at("/message/items").iterator(); + while (nodes.hasNext()) { + JsonNode node = nodes.next(); + results.add(transformSourceRecords(node.toString())); + } + return results; + } + + } + + /** + * This class is a Callable implementation to count the number of entries for an CrossRef query. + * This Callable use as query value to CrossRef the string queryString passed to constructor. 
+ * If the object will be construct through Query.class instance, the value of the Query's + * map with the key "query" will be used. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class CountByQueryCallable implements Callable { + + private Query query; + + private CountByQueryCallable(String queryString) { + query = new Query(); + query.addParameter("query", queryString); + } + + private CountByQueryCallable(Query query) { + this.query = query; + } + + @Override + public Integer call() throws Exception { + URIBuilder uriBuilder = new URIBuilder(url); + uriBuilder.addParameter("query", query.getParameterAsClass("query", String.class)); + Map> params = new HashMap>(); + String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + JsonNode jsonNode = convertStringJsonToJsonNode(responseString); + return jsonNode.at("/message/total-results").asInt(); + } + } + + /** + * This class is a Callable implementation to check if exist an CrossRef entry using DOI. + * The DOI to use can be passed through the constructor as a String or as Query's map entry, with the key "id". 
+ * return 1 if CrossRef entry exists otherwise 0 + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class DoiCheckCallable implements Callable { + + private final Query query; + + private DoiCheckCallable(final String id) { + final Query query = new Query(); + query.addParameter("id", id); + this.query = query; + } + + private DoiCheckCallable(final Query query) { + this.query = query; + } + + @Override + public Integer call() throws Exception { + Map> params = new HashMap>(); + URIBuilder uriBuilder = new URIBuilder(url + "/" + query.getParameterAsClass("id", String.class)); + String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + JsonNode jsonNode = convertStringJsonToJsonNode(responseString); + return StringUtils.equals(jsonNode.at("/status").toString(), "ok") ? 1 : 0; + } + } + + private JsonNode convertStringJsonToJsonNode(String json) { + try { + return new ObjectMapper().readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return null; + } + + public void setUrl(String url) { + this.url = url; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/datacite/DataCiteFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/datacite/DataCiteFieldMapping.java new file mode 100644 index 0000000000..f8540307b9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/datacite/DataCiteFieldMapping.java @@ -0,0 +1,38 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.datacite; + +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + +/** + * An 
implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the datacite metadatum fields on the DSpace metadatum fields + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + * @author Florian Gantner (florian.gantner@uni-bamberg.de) + */ +public class DataCiteFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. + */ + @Override + @Resource(name = "dataciteMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/datacite/DataCiteImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/datacite/DataCiteImportMetadataSourceServiceImpl.java new file mode 100644 index 0000000000..a11f2bc247 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/datacite/DataCiteImportMetadataSourceServiceImpl.java @@ -0,0 +1,168 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.datacite; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import javax.el.MethodNotFoundException; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import 
com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.DoiCheck; +import org.dspace.importer.external.service.components.QuerySource; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying Datacite + * Mainly copied from CrossRefImportMetadataSourceServiceImpl. + * + * optional Affiliation informations are not part of the API request. + * https://support.datacite.org/docs/can-i-see-more-detailed-affiliation-information-in-the-rest-api + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + * @author Florian Gantner (florian.gantner@uni-bamberg.de) + * + */ +public class DataCiteImportMetadataSourceServiceImpl + extends AbstractImportMetadataSourceService implements QuerySource { + private final static Logger log = LogManager.getLogger(); + + @Autowired + private LiveImportClient liveImportClient; + + @Autowired + private ConfigurationService configurationService; + + @Override + public String getImportSource() { + return "datacite"; + } + + @Override + public void init() throws Exception { + } + + @Override + public ImportRecord getRecord(String recordId) throws MetadataSourceException { + Collection records = getRecords(recordId, 0, 1); + if (records.size() == 0) { + return null; + } + return records.stream().findFirst().get(); + } + + @Override + public int getRecordsCount(String query) throws 
MetadataSourceException { + Collection records = getRecords(query, 0, -1); + return records == null ? 0 : records.size(); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + String id = getID(query.toString()); + return getRecordsCount(StringUtils.isBlank(id) ? query.toString() : id); + } + + + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + List records = new ArrayList<>(); + String id = getID(query); + Map> params = new HashMap<>(); + Map uriParameters = new HashMap<>(); + params.put("uriParameters", uriParameters); + if (StringUtils.isBlank(id)) { + id = query; + } + uriParameters.put("query", id); + int timeoutMs = configurationService.getIntProperty("datacite.timeout", 180000); + String url = configurationService.getProperty("datacite.url", "https://api.datacite.org/dois/"); + String responseString = liveImportClient.executeHttpGetRequest(timeoutMs, url, params); + JsonNode jsonNode = convertStringJsonToJsonNode(responseString); + if (jsonNode == null) { + log.warn("DataCite returned invalid JSON"); + return records; + } + JsonNode dataNode = jsonNode.at("/data"); + if (dataNode.isArray()) { + Iterator iterator = dataNode.iterator(); + while (iterator.hasNext()) { + JsonNode singleDoiNode = iterator.next(); + String json = singleDoiNode.at("/attributes").toString(); + records.add(transformSourceRecords(json)); + } + } else { + String json = dataNode.at("/attributes").toString(); + records.add(transformSourceRecords(json)); + } + + return records; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + try { + return new ObjectMapper().readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return null; + } + + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + String id = getID(query.toString()); + return getRecords(StringUtils.isBlank(id) ? 
query.toString() : id, 0, -1); + } + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + String id = getID(query.toString()); + return getRecord(StringUtils.isBlank(id) ? query.toString() : id); + } + + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + String id = getID(query.toString()); + return getRecords(StringUtils.isBlank(id) ? query.toString() : id, 0, -1); + } + + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for DataCite"); + } + + public String getID(String query) { + if (DoiCheck.isDoi(query)) { + return query; + } + // Workaround for encoded slashes. + if (query.contains("%252F")) { + query = query.replace("%252F", "/"); + } + if (DoiCheck.isDoi(query)) { + return query; + } + return StringUtils.EMPTY; + } +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoFieldMapping.java new file mode 100644 index 0000000000..64ec53ffb9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoFieldMapping.java @@ -0,0 +1,36 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.epo.service; + +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the Epo metadatum fields on the DSpace metadatum fields + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +public class EpoFieldMapping 
extends AbstractMetadataFieldMapping { + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. + */ + @Override + @Resource(name = "epoMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java new file mode 100644 index 0000000000..7240e356e3 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java @@ -0,0 +1,541 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.epo.service; + +import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.HEADER_PARAMETERS; + +import java.io.IOException; +import java.io.StringReader; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import java.util.stream.Collectors; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.collections.CollectionUtils; +import 
org.apache.commons.lang3.StringUtils; +import org.apache.http.HttpException; +import org.apache.http.client.utils.URIBuilder; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.xerces.impl.dv.util.Base64; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.contributor.EpoIdMetadataContributor.EpoDocumentId; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.components.QuerySource; +import org.jaxen.JaxenException; +import org.jdom2.Attribute; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.Namespace; +import org.jdom2.Text; +import org.jdom2.filter.Filters; +import org.jdom2.input.SAXBuilder; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying EPO + * + * @author Pasquale Cavallo (pasquale.cavallo at 4Science dot it) + */ +public class EpoImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private final static Logger log = LogManager.getLogger(); + + private String url; + private String authUrl; + private String searchUrl; + + private String consumerKey; + private String consumerSecret; + + private MetadataFieldConfig dateFiled; + private MetadataFieldConfig applicationNumber; + + public static final String APP_NO_DATE_SEPARATOR = "$$$"; + private static final String APP_NO_DATE_SEPARATOR_REGEX = "\\$\\$\\$"; + + 
@Autowired + private LiveImportClient liveImportClient; + + @Override + public void init() throws Exception {} + + /** + * The string that identifies this import implementation. Preferable a URI + * + * @return the identifying uri + */ + @Override + public String getImportSource() { + return "epo"; + } + + /** + * Set the customer epo key + * @param consumerKey the customer consumer key + */ + public void setConsumerKey(String consumerKey) { + this.consumerKey = consumerKey; + } + + public String getConsumerKey() { + return consumerKey; + } + + /** + * Set the costumer epo secret + * @param consumerSecret the customer epo secret + */ + public void setConsumerSecret(String consumerSecret) { + this.consumerSecret = consumerSecret; + } + + public String getConsumerSecret() { + return consumerSecret; + } + + public void setDateFiled(MetadataFieldConfig dateFiled) { + this.dateFiled = dateFiled; + } + + public MetadataFieldConfig getDateFiled() { + return dateFiled; + } + + public void setApplicationNumber(MetadataFieldConfig applicationNumber) { + this.applicationNumber = applicationNumber; + } + + public MetadataFieldConfig getApplicationNumber() { + return applicationNumber; + } + + /*** + * Log to EPO, bearer is valid for 20 minutes + * + * @param consumerKey The consumer Key + * @param consumerSecretKey The consumer secret key + * @return + * @throws IOException + * @throws HttpException + */ + protected String login() throws IOException, HttpException { + Map> params = getLoginParams(); + String entity = "grant_type=client_credentials"; + String json = liveImportClient.executeHttpPostRequest(this.authUrl, params, entity); + ObjectMapper mapper = new ObjectMapper(new JsonFactory()); + JsonNode rootNode = mapper.readTree(json); + JsonNode accessTokenNode = rootNode.get("access_token"); + return accessTokenNode.asText(); + } + + private Map> getLoginParams() { + Map> params = new HashMap>(); + Map headerParams = getLoginHeaderParams(); + params.put(HEADER_PARAMETERS, 
headerParams); + return params; + } + + private Map getLoginHeaderParams() { + Map params = new HashMap(); + String authString = consumerKey + ":" + consumerSecret; + params.put("Authorization", "Basic " + Base64.encode(authString.getBytes())); + params.put("Content-type", "application/x-www-form-urlencoded"); + return params; + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) { + try { + String bearer = login(); + return retry(new CountRecordsCallable(query, bearer)); + } catch (IOException | HttpException e) { + log.warn(e.getMessage()); + throw new RuntimeException(e.getMessage(), e); + } + } + return 0; + + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) { + try { + String bearer = login(); + return retry(new CountRecordsCallable(query, bearer)); + } catch (IOException | HttpException e) { + e.printStackTrace(); + } + } + return 0; + } + + @Override + public Collection getRecords(String query, int start, + int count) throws MetadataSourceException { + if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) { + try { + String bearer = login(); + return retry(new SearchByQueryCallable(query, bearer, start, count)); + } catch (IOException | HttpException e) { + log.warn(e.getMessage()); + throw new RuntimeException(e.getMessage(), e); + } + } + return new ArrayList(); + } + + @Override + public Collection getRecords(Query query) + throws MetadataSourceException { + if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) { + try { + String bearer = login(); + return retry(new SearchByQueryCallable(query, bearer)); + } catch (IOException | HttpException e) { + log.warn(e.getMessage()); + throw new RuntimeException(e.getMessage(), e); + } + } + return new 
ArrayList(); + } + + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) { + try { + String bearer = login(); + List list = retry(new SearchByIdCallable(id, bearer)); + return CollectionUtils.isNotEmpty(list) ? list.get(0) : null; + } catch (IOException | HttpException e) { + log.warn(e.getMessage()); + throw new RuntimeException(e.getMessage(), e); + } + } + return null; + } + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + return null; + } + + @Override + public Collection findMatchingRecords(Item item) + throws MetadataSourceException { + return null; + } + + @Override + public Collection findMatchingRecords(Query query) + throws MetadataSourceException { + return null; + } + + /** + * This class is a Callable implementation to count the number of entries for an EPO query. + * This Callable use as query value to EPO the string queryString passed to constructor. + * If the object will be construct through Query.class instance, the value of the Query's + * map with the key "query" will be used. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class CountRecordsCallable implements Callable { + + private String bearer; + private String query; + + private CountRecordsCallable(Query query, String bearer) { + this.query = query.getParameterAsClass("query", String.class); + this.bearer = bearer; + } + + private CountRecordsCallable(String query, String bearer) { + this.query = query; + this.bearer = bearer; + } + + public Integer call() throws Exception { + return countDocument(bearer, query); + } + } + + /** + * This class is a Callable implementation to get an EPO entry using epodocID (epodoc:AB1234567T) + * The epodocID to use can be passed through the constructor as a String or as Query's map entry, with the key "id". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByIdCallable implements Callable> { + + private String id; + private String bearer; + + private SearchByIdCallable(String id, String bearer) { + this.id = id; + this.bearer = bearer; + } + + public List call() throws Exception { + int positionToSplit = id.indexOf(":"); + String docType = EpoDocumentId.EPODOC; + String idS = id; + if (positionToSplit != -1) { + docType = id.substring(0, positionToSplit); + idS = id.substring(positionToSplit + 1, id.length()); + } else if (id.contains(APP_NO_DATE_SEPARATOR)) { + // special case the id is the combination of the applicationnumber and date filed + String query = "applicationnumber=" + id.split(APP_NO_DATE_SEPARATOR_REGEX)[0]; + SearchByQueryCallable search = new SearchByQueryCallable(query, bearer, 0, 10); + List records = search.call().stream() + .filter(r -> r.getValue(dateFiled.getSchema(), dateFiled.getElement(), + dateFiled.getQualifier()) + .stream() + .anyMatch(m -> StringUtils.equals(m.getValue(), + id.split(APP_NO_DATE_SEPARATOR_REGEX)[1]) + )) + .limit(1).collect(Collectors.toList()); + return records; + } + List records = searchDocument(bearer, idS, docType); + if (records.size() > 1) { + log.warn("More record are returned with epocID " + id); + } + return records; + } + } + + /** + * This class is a Callable implementation to get EPO entries based on query object. + * This Callable use as query value the string queryString passed to constructor. + * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByQueryCallable implements Callable> { + + private Query query; + private Integer start; + private Integer count; + private String bearer; + + private SearchByQueryCallable(Query query, String bearer) { + this.query = query; + this.bearer = bearer; + } + + public SearchByQueryCallable(String queryValue, String bearer, int start, int count) { + this.query = new Query(); + query.addParameter("query", queryValue); + this.start = query.getParameterAsClass("start", Integer.class) != null ? + query.getParameterAsClass("start", Integer.class) : 0; + this.count = query.getParameterAsClass("count", Integer.class) != null ? + query.getParameterAsClass("count", Integer.class) : 20; + this.bearer = bearer; + } + + @Override + public List call() throws Exception { + List records = new ArrayList(); + String queryString = query.getParameterAsClass("query", String.class); + if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) { + if (StringUtils.isNotBlank(queryString) && StringUtils.isNotBlank(bearer)) { + List epoDocIds = searchDocumentIds(bearer, queryString, start + 1, count); + for (EpoDocumentId epoDocId : epoDocIds) { + List recordfounds = searchDocument(bearer, epoDocId); + if (recordfounds.size() > 1) { + log.warn("More record are returned with epocID " + epoDocId.toString()); + } + records.addAll(recordfounds); + } + } + + } + return records; + } + } + + private Integer countDocument(String bearer, String query) { + if (StringUtils.isBlank(bearer)) { + return null; + } + try { + Map> params = new HashMap>(); + Map headerParameters = new HashMap(); + headerParameters.put("Authorization", "Bearer " + bearer); + headerParameters.put("X-OPS-Range", "1-1"); + params.put(HEADER_PARAMETERS, headerParameters); + + URIBuilder uriBuilder = new URIBuilder(this.searchUrl); + uriBuilder.addParameter("q", query); + + String response = 
liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(response)); + Element root = document.getRootElement(); + + List namespaces = Arrays.asList( + Namespace.getNamespace("xlink", "http://www.w3.org/1999/xlink"), + Namespace.getNamespace("ops", "http://ops.epo.org"), + Namespace.getNamespace("ns", "http://www.epo.org/exchange")); + + String totalRes = getElement(root, namespaces, "//ops:biblio-search/@total-result-count"); + return Integer.parseInt(totalRes); + } catch (JDOMException | IOException | URISyntaxException | JaxenException e) { + log.error(e.getMessage(), e); + return null; + } + } + + private List searchDocumentIds(String bearer, String query, int start, int count) { + List results = new ArrayList(); + int end = start + count; + if (StringUtils.isBlank(bearer)) { + return results; + } + try { + Map> params = new HashMap>(); + Map headerParameters = new HashMap(); + headerParameters.put("Authorization", "Bearer " + bearer); + if (start >= 1 && end > start) { + headerParameters.put("X-OPS-Range", start + "-" + end); + } + params.put(HEADER_PARAMETERS, headerParameters); + + URIBuilder uriBuilder = new URIBuilder(this.searchUrl); + uriBuilder.addParameter("q", query); + + String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(response)); + Element root = document.getRootElement(); + + List namespaces = Arrays.asList( + Namespace.getNamespace("xlink", "http://www.w3.org/1999/xlink"), + Namespace.getNamespace("ops", "http://ops.epo.org"), + Namespace.getNamespace("ns", "http://www.epo.org/exchange")); + XPathExpression xpath = XPathFactory.instance() + .compile("//ns:document-id", Filters.element(), null, namespaces); + + List documentIds = xpath.evaluate(root); + for (Element documentId : 
documentIds) { + results.add(new EpoDocumentId(documentId, namespaces)); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + return results; + } + + private List searchDocument(String bearer, EpoDocumentId id) { + return searchDocument(bearer, id.getId(), id.getDocumentIdType()); + } + + private List searchDocument(String bearer, String id, String docType) { + List results = new ArrayList(); + if (StringUtils.isBlank(bearer)) { + return results; + } + try { + Map> params = new HashMap>(); + Map headerParameters = new HashMap(); + headerParameters.put("Authorization", "Bearer " + bearer); + params.put(HEADER_PARAMETERS, headerParameters); + + String url = this.url.replace("$(doctype)", docType).replace("$(id)", id); + + String response = liveImportClient.executeHttpGetRequest(1000, url, params); + List elements = splitToRecords(response); + for (Element element : elements) { + results.add(transformSourceRecords(element)); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + return results; + } + + private List splitToRecords(String recordsSrc) { + try { + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(recordsSrc)); + Element root = document.getRootElement(); + List namespaces = Arrays.asList(Namespace.getNamespace("ns", "http://www.epo.org/exchange")); + XPathExpression xpath = XPathFactory.instance().compile("//ns:exchange-document", + Filters.element(), null, namespaces); + + List recordsList = xpath.evaluate(root); + return recordsList; + } catch (JDOMException | IOException e) { + log.error(e.getMessage(), e); + return new LinkedList(); + } + } + + private String getElement(Element document, List namespaces, String path) throws JaxenException { + XPathExpression xpath = XPathFactory.instance().compile(path, Filters.fpassthrough(), null, namespaces); + List nodes = xpath.evaluate(document); + //exactly one element expected for any field + if (CollectionUtils.isEmpty(nodes)) { + 
return StringUtils.EMPTY; + } else { + return getValue(nodes.get(0)); + } + } + + private String getValue(Object el) { + if (el instanceof Element) { + return ((Element) el).getText(); + } else if (el instanceof Attribute) { + return ((Attribute) el).getValue(); + } else if (el instanceof String) { + return (String)el; + } else if (el instanceof Text) { + return ((Text) el).getText(); + } else { + log.error("node of type: " + el.getClass()); + return ""; + } + } + + public void setUrl(String url) { + this.url = url; + } + + public void setAuthUrl(String authUrl) { + this.authUrl = authUrl; + } + + public void setSearchUrl(String searchUrl) { + this.searchUrl = searchUrl; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClient.java b/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClient.java new file mode 100644 index 0000000000..a1132cda9c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClient.java @@ -0,0 +1,39 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.liveimportclient.service; + +import java.util.Map; + +/** + * Interface for classes that allow to contact LiveImport clients. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public interface LiveImportClient { + + /** + * Http GET request + * + * @param timeout The connect timeout in milliseconds + * @param URL URL + * @param params This map contains the parameters to be included in the request. 
+ * Each parameter will be added to the url?(key=value) + * @return The response in String type converted from InputStream + */ + public String executeHttpGetRequest(int timeout, String URL, Map> params); + + /** + * Http POST request + * + * @param URL URL + * @param params This map contains the header params to be included in the request. + * @param entry the entity value + * @return the response in String type converted from InputStream + */ + public String executeHttpPostRequest(String URL, Map> params, String entry); +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java new file mode 100644 index 0000000000..81a6631127 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java @@ -0,0 +1,188 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.liveimportclient.service; + +import java.io.InputStream; +import java.net.URISyntaxException; +import java.nio.charset.Charset; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; + +import org.apache.commons.collections.MapUtils; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.HttpHost; +import org.apache.http.HttpResponse; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.client.config.RequestConfig.Builder; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpRequestBase; +import org.apache.http.client.utils.URIBuilder; +import 
org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link LiveImportClient}. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science dot com) + */ +public class LiveImportClientImpl implements LiveImportClient { + + private final static Logger log = LogManager.getLogger(); + + public static final String URI_PARAMETERS = "uriParameters"; + public static final String HEADER_PARAMETERS = "headerParameters"; + + private CloseableHttpClient httpClient; + + @Autowired + private ConfigurationService configurationService; + + @Override + public String executeHttpGetRequest(int timeout, String URL, Map> params) { + HttpGet method = null; + try (CloseableHttpClient httpClient = Optional.ofNullable(this.httpClient) + .orElseGet(HttpClients::createDefault)) { + + Builder requestConfigBuilder = RequestConfig.custom(); + requestConfigBuilder.setConnectionRequestTimeout(timeout); + RequestConfig defaultRequestConfig = requestConfigBuilder.build(); + + method = new HttpGet(buildUrl(URL, params.get(URI_PARAMETERS))); + method.setConfig(defaultRequestConfig); + + Map headerParams = params.get(HEADER_PARAMETERS); + if (MapUtils.isNotEmpty(headerParams)) { + for (String param : headerParams.keySet()) { + method.setHeader(param, headerParams.get(param)); + } + } + + configureProxy(method, defaultRequestConfig); + + HttpResponse httpResponse = httpClient.execute(method); + if (isNotSuccessfull(httpResponse)) { + throw new RuntimeException("The request failed with: " + getStatusCode(httpResponse) + " code, reason= " + + httpResponse.getStatusLine().getReasonPhrase()); + } + InputStream inputStream = httpResponse.getEntity().getContent(); + return 
IOUtils.toString(inputStream, Charset.defaultCharset()); + } catch (Exception e1) { + log.error(e1.getMessage(), e1); + } finally { + if (Objects.nonNull(method)) { + method.releaseConnection(); + } + } + return StringUtils.EMPTY; + } + + @Override + public String executeHttpPostRequest(String URL, Map> params, String entry) { + HttpPost method = null; + try (CloseableHttpClient httpClient = Optional.ofNullable(this.httpClient) + .orElseGet(HttpClients::createDefault)) { + + Builder requestConfigBuilder = RequestConfig.custom(); + RequestConfig defaultRequestConfig = requestConfigBuilder.build(); + + method = new HttpPost(buildUrl(URL, params.get(URI_PARAMETERS))); + method.setConfig(defaultRequestConfig); + if (StringUtils.isNotBlank(entry)) { + method.setEntity(new StringEntity(entry)); + } + setHeaderParams(method, params); + + configureProxy(method, defaultRequestConfig); + + HttpResponse httpResponse = httpClient.execute(method); + if (isNotSuccessfull(httpResponse)) { + throw new RuntimeException(); + } + InputStream inputStream = httpResponse.getEntity().getContent(); + return IOUtils.toString(inputStream, Charset.defaultCharset()); + } catch (Exception e1) { + log.error(e1.getMessage(), e1); + } finally { + if (Objects.nonNull(method)) { + method.releaseConnection(); + } + } + return StringUtils.EMPTY; + } + + private void configureProxy(HttpRequestBase method, RequestConfig defaultRequestConfig) { + String proxyHost = configurationService.getProperty("http.proxy.host"); + String proxyPort = configurationService.getProperty("http.proxy.port"); + if (StringUtils.isNotBlank(proxyHost) && StringUtils.isNotBlank(proxyPort)) { + RequestConfig requestConfig = RequestConfig.copy(defaultRequestConfig) + .setProxy(new HttpHost(proxyHost, Integer.parseInt(proxyPort), "http")) + .build(); + method.setConfig(requestConfig); + } + } + + /** + * Allows to set the header parameters to the HTTP Post method + * + * @param method HttpPost method + * @param params This map 
contains the header params to be included in the request. + */ + private void setHeaderParams(HttpPost method, Map> params) { + Map headerParams = params.get(HEADER_PARAMETERS); + if (MapUtils.isNotEmpty(headerParams)) { + for (String param : headerParams.keySet()) { + method.setHeader(param, headerParams.get(param)); + } + } + } + + /** + * This method allows you to add the parameters contained in the requestParams map to the URL + * + * @param URL URL + * @param requestParams This map contains the parameters to be included in the request. + * Each parameter will be added to the url?(key=value) + * @return + * @throws URISyntaxException + */ + private String buildUrl(String URL, Map requestParams) throws URISyntaxException { + URIBuilder uriBuilder = new URIBuilder(URL); + if (MapUtils.isNotEmpty(requestParams)) { + for (String param : requestParams.keySet()) { + uriBuilder.setParameter(param, requestParams.get(param)); + } + } + return uriBuilder.toString(); + } + + private boolean isNotSuccessfull(HttpResponse response) { + int statusCode = getStatusCode(response); + return statusCode < 200 || statusCode > 299; + } + + private int getStatusCode(HttpResponse response) { + return response.getStatusLine().getStatusCode(); + } + + public CloseableHttpClient getHttpClient() { + return httpClient; + } + + public void setHttpClient(CloseableHttpClient httpClient) { + this.httpClient = httpClient; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/ArrayElementAttributeProcessor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/ArrayElementAttributeProcessor.java new file mode 100644 index 0000000000..b938a290c2 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/ArrayElementAttributeProcessor.java @@ -0,0 +1,82 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed 
in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * This Processor allows to extract attribute values of an array. + * For example to extract all values of secondAttribute, + * "array":[ + * { + * "firstAttribute":"first value", + * "secondAttribute":"second value" + * }, + * { + * "firstAttribute":"first value", + * "secondAttribute":"second value" + * } + * ] + * + * it's possible to configure a bean with + * pathToArray=/array and elementAttribute=/secondAttribute + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class ArrayElementAttributeProcessor implements JsonPathMetadataProcessor { + + private final static Logger log = LogManager.getLogger(); + + private String pathToArray; + + private String elementAttribute; + + @Override + public Collection processMetadata(String json) { + JsonNode rootNode = convertStringJsonToJsonNode(json); + Iterator array = rootNode.at(pathToArray).iterator(); + Collection values = new ArrayList<>(); + while (array.hasNext()) { + JsonNode element = array.next(); + String value = element.at(elementAttribute).textValue(); + if (StringUtils.isNoneBlank(value)) { + values.add(value); + } + } + return values; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + 
return body; + } + + public void setPathToArray(String pathToArray) { + this.pathToArray = pathToArray; + } + + public void setElementAttribute(String elementAttribute) { + this.elementAttribute = elementAttribute; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/AuthorMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/AuthorMetadataContributor.java new file mode 100644 index 0000000000..26063dc744 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/AuthorMetadataContributor.java @@ -0,0 +1,173 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import org.apache.commons.lang.StringUtils; +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jaxen.JaxenException; +import org.jdom2.Element; +import org.jdom2.Namespace; + +/** + * Scopus specific implementation of {@link MetadataContributor} + * Responsible for generating the ScopusID, orcid, author name and affiliationID + * from the retrieved item. 
+ * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science dot it) + */ +public class AuthorMetadataContributor extends SimpleXpathMetadatumContributor { + + private static final Namespace NAMESPACE = Namespace.getNamespace("http://www.w3.org/2005/Atom"); + + private MetadataFieldConfig orcid; + private MetadataFieldConfig scopusId; + private MetadataFieldConfig authname; + private MetadataFieldConfig affiliation; + + private Map affId2affName = new HashMap(); + + /** + * Retrieve the metadata associated with the given object. + * Depending on the retrieved node (using the query), + * different types of values will be added to the MetadatumDTO list. + * + * @param element A class to retrieve metadata from. + * @return A collection of import records. Only the ScopusID, orcid, author name and affiliation + * of the found records may be put in the record. + */ + @Override + public Collection contributeMetadata(Element element) { + List values = new LinkedList<>(); + List metadatums = null; + fillAffillation(element); + try { + List nodes = element.getChildren("author", NAMESPACE); + for (Element el : nodes) { + metadatums = getMetadataOfAuthors(el); + if (Objects.nonNull(metadatums)) { + for (MetadatumDTO metadatum : metadatums) { + values.add(metadatum); + } + } + } + } catch (JaxenException e) { + throw new RuntimeException(e); + } + return values; + } + + /** + * Retrieve the ScopusID, orcid, author name and affiliationID + * metadata associated with the given element object. 
+ * If the value retrieved from the element is empty + * it is set PLACEHOLDER_PARENT_METADATA_VALUE + * + * @param element A class to retrieve metadata from + * @throws JaxenException If Xpath evaluation failed + */ + private List getMetadataOfAuthors(Element element) throws JaxenException { + List metadatums = new ArrayList(); + Element authname = element.getChild("authname", NAMESPACE); + Element scopusId = element.getChild("authid", NAMESPACE); + Element orcid = element.getChild("orcid", NAMESPACE); + Element afid = element.getChild("afid", NAMESPACE); + + addMetadatum(metadatums, getMetadata(getElementValue(authname), this.authname)); + addMetadatum(metadatums, getMetadata(getElementValue(scopusId), this.scopusId)); + addMetadatum(metadatums, getMetadata(getElementValue(orcid), this.orcid)); + addMetadatum(metadatums, getMetadata(StringUtils.isNotBlank(afid.getValue()) + ? this.affId2affName.get(afid.getValue()) : null, this.affiliation)); + return metadatums; + } + + private void addMetadatum(List list, MetadatumDTO metadatum) { + if (Objects.nonNull(metadatum)) { + list.add(metadatum); + } + } + + private String getElementValue(Element element) { + if (Objects.nonNull(element)) { + return element.getValue(); + } + return StringUtils.EMPTY; + } + + private MetadatumDTO getMetadata(String value, MetadataFieldConfig metadaConfig) { + if (StringUtils.isBlank(value)) { + return null; + } + MetadatumDTO metadata = new MetadatumDTO(); + metadata.setElement(metadaConfig.getElement()); + metadata.setQualifier(metadaConfig.getQualifier()); + metadata.setSchema(metadaConfig.getSchema()); + metadata.setValue(value); + return metadata; + } + + private void fillAffillation(Element element) { + try { + List nodes = element.getChildren("affiliation", NAMESPACE); + for (Element el : nodes) { + fillAffiliation2Name(el); + } + } catch (JaxenException e) { + throw new RuntimeException(e); + } + } + + private void fillAffiliation2Name(Element element) throws JaxenException { + 
Element affilationName = element.getChild("affilname", NAMESPACE); + Element affilationId = element.getChild("afid", NAMESPACE); + if (Objects.nonNull(affilationId) && Objects.nonNull(affilationName)) { + affId2affName.put(affilationId.getValue(), affilationName.getValue()); + } + } + + public MetadataFieldConfig getAuthname() { + return authname; + } + + public void setAuthname(MetadataFieldConfig authname) { + this.authname = authname; + } + + public MetadataFieldConfig getOrcid() { + return orcid; + } + + public void setOrcid(MetadataFieldConfig orcid) { + this.orcid = orcid; + } + + public MetadataFieldConfig getScopusId() { + return scopusId; + } + + public void setScopusId(MetadataFieldConfig scopusId) { + this.scopusId = scopusId; + } + + public MetadataFieldConfig getAffiliation() { + return affiliation; + } + + public void setAffiliation(MetadataFieldConfig affiliation) { + this.affiliation = affiliation; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/EpoIdMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/EpoIdMetadataContributor.java new file mode 100644 index 0000000000..e32f45a4d5 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/EpoIdMetadataContributor.java @@ -0,0 +1,312 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.Arrays; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import javax.annotation.Resource; + +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; 
+import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jaxen.JaxenException; +import org.jdom2.Attribute; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.Text; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; +import org.springframework.beans.factory.annotation.Required; + +/** + * Custom MetadataContributor to manage Epo ID. + * Need as input element and all children. + * + * @author Pasquale Cavallo + */ +public class EpoIdMetadataContributor implements MetadataContributor { + + protected MetadataFieldConfig field; + + private boolean needType; + + /** + * This property will be used in ID definition. + * If this is true, id will be in the form docType:EpoID, otherwise EpoID will be returned + * + * @param needType if true, docType will be included in id definition + */ + public void setNeedType(boolean needType) { + this.needType = needType; + } + + /** + * Return prefixToNamespaceMapping + * + * @return a prefixToNamespaceMapping map + */ + public Map getPrefixToNamespaceMapping() { + return prefixToNamespaceMapping; + } + + protected MetadataFieldMapping> metadataFieldMapping; + + /** + * Return metadataFieldMapping + * + * @return MetadataFieldMapping + */ + public MetadataFieldMapping> getMetadataFieldMapping() { + return metadataFieldMapping; + } + + /** + * Set the metadataFieldMapping of this SimpleXpathMetadatumContributor + * + * @param metadataFieldMapping the new mapping. + */ + public void setMetadataFieldMapping( + MetadataFieldMapping> metadataFieldMapping) { + this.metadataFieldMapping = metadataFieldMapping; + } + + /** + * Set the prefixToNamespaceMapping for this object, + * + * @param prefixToNamespaceMapping the new mapping. 
+ */ + @Resource(name = "isiFullprefixMapping") + public void setPrefixToNamespaceMapping(Map prefixToNamespaceMapping) { + this.prefixToNamespaceMapping = prefixToNamespaceMapping; + } + + protected Map prefixToNamespaceMapping; + + /** + * Initialize EpoIdMetadataContributor with a query, prefixToNamespaceMapping and MetadataFieldConfig + * + * @param query query string + * @param prefixToNamespaceMapping metadata prefix to namespace mapping + * @param field + * MetadataFieldConfig + */ + public EpoIdMetadataContributor(String query, Map prefixToNamespaceMapping, + MetadataFieldConfig field) { + this.query = query; + this.prefixToNamespaceMapping = prefixToNamespaceMapping; + this.field = field; + } + + /** + * Empty constructor for EpoIdMetadataContributor + */ + public EpoIdMetadataContributor() { + + } + + protected String query; + + /** + * Return the MetadataFieldConfig used while retrieving MetadatumDTO + * + * @return MetadataFieldConfig + */ + public MetadataFieldConfig getField() { + return field; + } + + /** + * Setting the MetadataFieldConfig + * + * @param field MetadataFieldConfig used while retrieving MetadatumDTO + */ + @Required + public void setField(MetadataFieldConfig field) { + this.field = field; + } + + /** + * Return query used to create an xpathExpression on, this query is used to + * + * @return the query this instance is based on + */ + public String getQuery() { + return query; + } + + @Required + public void setQuery(String query) { + this.query = query; + } + + /** + * Retrieve the metadata associated with the given object. + * Depending on the retrieved node (using the query), different types of values will be added to the MetadatumDTO + * list + * + * @param element A class to retrieve metadata from. + * @return a collection of import records. Only the identifier of the found records may be put in the record. 
+ */ + @Override + public Collection contributeMetadata(Element element) { + List values = new LinkedList<>(); + try { + List namespaces = Arrays.asList( + Namespace.getNamespace("xlink", "http://www.w3.org/1999/xlink"), + Namespace.getNamespace("ops", "http://ops.epo.org"), + Namespace.getNamespace("ns", "http://www.epo.org/exchange")); + XPathExpression xpath = XPathFactory.instance().compile(query, Filters.element(), null, + namespaces); + List elements = xpath.evaluate(element); + for (Element el : elements) { + EpoDocumentId document = new EpoDocumentId(el, namespaces); + MetadatumDTO metadatum = new MetadatumDTO(); + metadatum.setElement(field.getElement()); + metadatum.setQualifier(field.getQualifier()); + metadatum.setSchema(field.getSchema()); + if (needType) { + metadatum.setValue(document.getIdAndType()); + } else { + metadatum.setValue(document.getId()); + } + values.add(metadatum); + } + return values; + } catch (JaxenException e) { + System.err.println(query); + throw new RuntimeException(e); + } + } + + /** + * This class maps EPO's response metadata needs to extract epo ID. 
+ * + * @author Pasquale Cavallo + * + */ + public static class EpoDocumentId { + + private String documentIdType; + private String country; + private String docNumber; + private String kind; + private String date; + private List namespaces; + + + public static final String DOCDB = "docdb"; + public static final String EPODOC = "epodoc"; + public static final String ORIGIN = "origin"; + + + public EpoDocumentId(Element documentId, List namespaces) throws JaxenException { + this.namespaces = namespaces; + Element preferredId = null; + XPathExpression xpath = XPathFactory.instance().compile( + "./ns:document-id[@document-id-type=\"epodoc\"]", Filters.fpassthrough(), null, namespaces); + + List nodes = xpath.evaluate(documentId); + if (CollectionUtils.isNotEmpty(nodes)) { + preferredId = (Element) nodes.get(0); + } + if (Objects.isNull(preferredId)) { + preferredId = documentId; + } + + this.documentIdType = buildDocumentIdType(preferredId); + this.country = buildCountry(preferredId); + this.docNumber = buildDocNumber(preferredId); + this.kind = buildKind(preferredId); + this.date = buildDate(preferredId); + } + + private String buildDocumentIdType(Element documentId) throws JaxenException { + return getElement(documentId, "./@document-id-type"); + } + + private String buildCountry(Element documentId) throws JaxenException { + return getElement(documentId, "./ns:country"); + } + + private String buildDocNumber(Element documentId) throws JaxenException { + return getElement(documentId, "./ns:doc-number"); + } + + private String buildKind(Element documentId) throws JaxenException { + return getElement(documentId, "./ns:kind"); + } + + private String buildDate(Element documentId) throws JaxenException { + return getElement(documentId, "./ns:date"); + } + + + public String getDocumentIdType() { + return documentIdType; + } + + /** + * This method compute the epo ID from fields + * + * @return the EPO id + */ + public String getId() { + if (DOCDB.equals(documentIdType)) { 
+ return country + "." + docNumber + "." + kind; + } else if (EPODOC.equals(documentIdType)) { + return docNumber + ((kind != null) ? kind : StringUtils.EMPTY); + } else { + return StringUtils.EMPTY; + } + } + + public String getIdAndType() { + if (EPODOC.equals(documentIdType)) { + return documentIdType + ":" + docNumber + ((kind != null) ? kind : ""); + } else if (DOCDB.equals(documentIdType)) { + return documentIdType + ":" + country + "." + docNumber + "." + kind; + } else { + return StringUtils.EMPTY; + } + } + + + private String getElement(Element documentId, String path) throws JaxenException { + if (Objects.isNull(documentId)) { + return StringUtils.EMPTY; + } + XPathExpression xpath = XPathFactory.instance().compile(path, Filters.fpassthrough(), null, + namespaces); + List nodes = xpath.evaluate(documentId); + //exactly one element expected for any field + return CollectionUtils.isNotEmpty(nodes) ? getValue(nodes.get(0)) : StringUtils.EMPTY; + } + + private String getValue(Object el) { + if (el instanceof Element) { + return ((Element) el).getText(); + } else if (el instanceof Attribute) { + return ((Attribute) el).getValue(); + } else if (el instanceof String) { + return (String)el; + } else if (el instanceof Text) { + return ((Text) el).getText(); + } else { + return StringUtils.EMPTY; + } + } + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/JsonPathMetadataProcessor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/JsonPathMetadataProcessor.java new file mode 100644 index 0000000000..2de0c6a0bb --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/JsonPathMetadataProcessor.java @@ -0,0 +1,23 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * 
http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.Collection; + +/** + * Service interface class for processing json object. + * The implementation of this class is responsible for all business logic calls + * for extracting of values from json object. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public interface JsonPathMetadataProcessor { + + public Collection processMetadata(String json); + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/MatrixElementProcessor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/MatrixElementProcessor.java new file mode 100644 index 0000000000..c8e93971f4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/MatrixElementProcessor.java @@ -0,0 +1,87 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * This Processor allows to extract all values of a matrix. 
+ * Only need to configure the path to the matrix in "pathToMatrix" + * For exaple to extract all values + * "matrix": [ + * [ + * "first", + * "second" + * ], + * [ + * "third" + * ], + * [ + * "fourth", + * "fifth" + * ] + * ], + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class MatrixElementProcessor implements JsonPathMetadataProcessor { + + private final static Logger log = LogManager.getLogger(); + + private String pathToMatrix; + + @Override + public Collection processMetadata(String json) { + JsonNode rootNode = convertStringJsonToJsonNode(json); + Iterator array = rootNode.at(pathToMatrix).elements(); + Collection values = new ArrayList<>(); + while (array.hasNext()) { + JsonNode element = array.next(); + if (element.isArray()) { + Iterator nodes = element.iterator(); + while (nodes.hasNext()) { + String nodeValue = nodes.next().textValue(); + if (StringUtils.isNotBlank(nodeValue)) { + values.add(nodeValue); + } + } + } else { + String nodeValue = element.textValue(); + if (StringUtils.isNotBlank(nodeValue)) { + values.add(nodeValue); + } + } + } + return values; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return body; + } + + public void setPathToMatrix(String pathToMatrix) { + this.pathToMatrix = pathToMatrix; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/PageRangeXPathMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/PageRangeXPathMetadataContributor.java new file mode 100644 index 0000000000..0bcb33d689 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/PageRangeXPathMetadataContributor.java @@ -0,0 
+1,110 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Objects; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jdom2.Element; +import org.jdom2.Namespace; + +/** + * Scopus specific implementation of {@link MetadataContributor} + * Responsible for generating the Scopus startPage and endPage from the retrieved item. + * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science.com) + */ +public class PageRangeXPathMetadataContributor extends SimpleXpathMetadatumContributor { + + private MetadataFieldConfig startPageMetadata; + + private MetadataFieldConfig endPageMetadata; + + /** + * Retrieve the metadata associated with the given Element object. + * Depending on the retrieved node (using the query), + * StartPage and EndPage values will be added to the MetadatumDTO list + * + * @param el A class to retrieve metadata from. + * @return A collection of import records. Only the StartPage and EndPage + * of the found records may be put in the record. 
+ */ + @Override + public Collection contributeMetadata(Element el) { + List values = new LinkedList<>(); + List metadatums = null; + for (String ns : prefixToNamespaceMapping.keySet()) { + List nodes = el.getChildren(query, Namespace.getNamespace(ns)); + for (Element element : nodes) { + metadatums = getMetadatum(element.getValue()); + if (Objects.nonNull(metadatums)) { + for (MetadatumDTO metadatum : metadatums) { + values.add(metadatum); + } + } + } + } + return values; + } + + private List getMetadatum(String value) { + List metadatums = new ArrayList(); + if (StringUtils.isBlank(value)) { + return null; + } + String [] range = value.split("-"); + if (range.length == 2) { + metadatums.add(setStartPage(range)); + metadatums.add(setEndPage(range)); + } else if (range.length != 0) { + metadatums.add(setStartPage(range)); + } + return metadatums; + } + + private MetadatumDTO setEndPage(String[] range) { + MetadatumDTO endPage = new MetadatumDTO(); + endPage.setValue(range[1]); + endPage.setElement(endPageMetadata.getElement()); + endPage.setQualifier(endPageMetadata.getQualifier()); + endPage.setSchema(endPageMetadata.getSchema()); + return endPage; + } + + private MetadatumDTO setStartPage(String[] range) { + MetadatumDTO startPage = new MetadatumDTO(); + startPage.setValue(range[0]); + startPage.setElement(startPageMetadata.getElement()); + startPage.setQualifier(startPageMetadata.getQualifier()); + startPage.setSchema(startPageMetadata.getSchema()); + return startPage; + } + + public MetadataFieldConfig getStartPageMetadata() { + return startPageMetadata; + } + + public void setStartPageMetadata(MetadataFieldConfig startPageMetadata) { + this.startPageMetadata = startPageMetadata; + } + + public MetadataFieldConfig getEndPageMetadata() { + return endPageMetadata; + } + + public void setEndPageMetadata(MetadataFieldConfig endPageMetadata) { + this.endPageMetadata = endPageMetadata; + } + +} \ No newline at end of file diff --git 
a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/ReplaceCharacterXPathMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/ReplaceCharacterXPathMetadataContributor.java new file mode 100644 index 0000000000..9fb92348be --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/ReplaceCharacterXPathMetadataContributor.java @@ -0,0 +1,66 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Objects; + +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jdom2.Element; +import org.jdom2.Namespace; + +/** + * This contributor replace specific character in the metadata value. + * It is useful for some provider (e.g. Scopus) which use containing "/" character. + * Actually, "/" will never encode by framework in URL building. In the same ways, if we + * encode "/" -> %2F, it will be encoded by framework and become %252F. 
+ * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science.com) + */ +public class ReplaceCharacterXPathMetadataContributor extends SimpleXpathMetadatumContributor { + + private char characterToBeReplaced; + + private char characterToReplaceWith; + + @Override + public Collection contributeMetadata(Element element) { + List values = new LinkedList<>(); + for (String ns : prefixToNamespaceMapping.keySet()) { + List nodes = element.getChildren(query, Namespace.getNamespace(ns)); + for (Element el : nodes) { + values.add(getMetadatum(field, el.getValue())); + } + } + return values; + } + + private MetadatumDTO getMetadatum(MetadataFieldConfig field, String value) { + MetadatumDTO dcValue = new MetadatumDTO(); + if (Objects.isNull(field)) { + return null; + } + dcValue.setValue(value == null ? null : value.replace(characterToBeReplaced, characterToReplaceWith)); + dcValue.setElement(field.getElement()); + dcValue.setQualifier(field.getQualifier()); + dcValue.setSchema(field.getSchema()); + return dcValue; + } + + public void setCharacterToBeReplaced(int characterToBeReplaced) { + this.characterToBeReplaced = (char)characterToBeReplaced; + } + + public void setCharacterToReplaceWith(int characterToReplaceWith) { + this.characterToReplaceWith = (char)characterToReplaceWith; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleConcatContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleConcatContributor.java new file mode 100644 index 0000000000..d84bc65701 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleConcatContributor.java @@ -0,0 +1,65 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package 
org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; + +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; + +/** + * This contributor is able to concat multi value. + * Given a certain path, if it contains several nodes, + * the values of nodes will be concatenated into a single one. + * The concrete example we can see in the file wos-responce.xml in the node, + * which may contain several

    paragraphs, + * this Contributor allows concatenating all

    paragraphs. to obtain a single one. + * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it) + */ +public class SimpleConcatContributor extends SimpleXpathMetadatumContributor { + + private final static Logger log = LogManager.getLogger(); + + @Override + public Collection contributeMetadata(Element t) { + List values = new LinkedList<>(); + StringBuilder text = new StringBuilder(); + List namespaces = new ArrayList(); + for (String ns : prefixToNamespaceMapping.keySet()) { + namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns)); + } + XPathExpression xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null,namespaces); + List nodes = xpath.evaluate(t); + for (Object el : nodes) { + if (el instanceof Element) { + Element element = (Element) el; + if (StringUtils.isNotBlank(element.getText())) { + text.append(element.getText()); + } + } else { + log.warn("node of type: " + el.getClass()); + } + } + if (StringUtils.isNotBlank(text.toString())) { + values.add(metadataFieldMapping.toDCValue(field, text.toString())); + } + return values; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleJsonPathMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleJsonPathMetadataContributor.java new file mode 100644 index 0000000000..590fc63283 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleJsonPathMetadataContributor.java @@ -0,0 +1,181 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import 
java.util.Objects; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; + +/** + * A simple JsonPath Metadata processor + * that allow extract value from json object + * by configuring the path in the query variable via the bean. + * moreover this can also perform more compact extractions + * by configuring specific json processor in "metadataProcessor" + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class SimpleJsonPathMetadataContributor implements MetadataContributor { + + private final static Logger log = LogManager.getLogger(); + + private String query; + + private MetadataFieldConfig field; + + protected JsonPathMetadataProcessor metadataProcessor; + + /** + * Initialize SimpleJsonPathMetadataContributor with a query, prefixToNamespaceMapping and MetadataFieldConfig + * + * @param query The JSonPath query + * @param field the matadata field to map the result of the Json path query + * MetadataFieldConfig + */ + public SimpleJsonPathMetadataContributor(String query, MetadataFieldConfig field) { + this.query = query; + this.field = field; + } + + + /** + * Unused by this implementation + */ + @Override + public void setMetadataFieldMapping(MetadataFieldMapping> rt) { + + } + + /** + * Empty constructor for SimpleJsonPathMetadataContributor + */ + public SimpleJsonPathMetadataContributor() { + + } + + /** + * Return the MetadataFieldConfig used while retrieving MetadatumDTO + * + * @return MetadataFieldConfig + */ + public MetadataFieldConfig getField() { + return field; + } + + /** 
+ * Setting the MetadataFieldConfig + * + * @param field MetadataFieldConfig used while retrieving MetadatumDTO + */ + public void setField(MetadataFieldConfig field) { + this.field = field; + } + + /** + * Return query used to create the JSonPath + * + * @return the query this instance is based on + */ + public String getQuery() { + return query; + } + + /** + * Return query used to create the JSonPath + * + */ + public void setQuery(String query) { + this.query = query; + } + + /** + * Used to process data got by jsonpath expression, like arrays to stringify, change date format or else + * If it is null, toString will be used. + * + * @param metadataProcessor + */ + public void setMetadataProcessor(JsonPathMetadataProcessor metadataProcessor) { + this.metadataProcessor = metadataProcessor; + } + + /** + * Retrieve the metadata associated with the given object. + * The toString() of the resulting object will be used. + * + * @param fullJson A class to retrieve metadata from. + * @return a collection of import records. Only the identifier of the found records may be put in the record. 
+ */ + @Override + public Collection contributeMetadata(String fullJson) { + Collection metadata = new ArrayList<>(); + Collection metadataValue = new ArrayList<>(); + if (Objects.nonNull(metadataProcessor)) { + metadataValue = metadataProcessor.processMetadata(fullJson); + } else { + JsonNode jsonNode = convertStringJsonToJsonNode(fullJson); + JsonNode node = jsonNode.at(query); + if (node.isArray()) { + Iterator nodes = node.iterator(); + while (nodes.hasNext()) { + String nodeValue = getStringValue(nodes.next()); + if (StringUtils.isNotBlank(nodeValue)) { + metadataValue.add(nodeValue); + } + } + } else if (!node.isNull() && StringUtils.isNotBlank(node.toString())) { + String nodeValue = getStringValue(node); + if (StringUtils.isNotBlank(nodeValue)) { + metadataValue.add(nodeValue); + } + } + } + for (String value : metadataValue) { + MetadatumDTO metadatumDto = new MetadatumDTO(); + metadatumDto.setValue(value); + metadatumDto.setElement(field.getElement()); + metadatumDto.setQualifier(field.getQualifier()); + metadatumDto.setSchema(field.getSchema()); + metadata.add(metadatumDto); + } + return metadata; + } + + private String getStringValue(JsonNode node) { + if (node.isTextual()) { + return node.textValue(); + } + if (node.isNumber()) { + return node.numberValue().toString(); + } + log.error("It wasn't possible to convert the value of the following JsonNode:" + node.asText()); + return StringUtils.EMPTY; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return body; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleMultiplePathContributor.java 
b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleMultiplePathContributor.java new file mode 100644 index 0000000000..57a3293151 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleMultiplePathContributor.java @@ -0,0 +1,75 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; + +/** + * Web of Science specific implementation of {@link MetadataContributor}. + * This contributor can perform research on multi-paths. + * For example, to populate the subject metadata, in the Web of Science response + * the values are contained in different paths, + * so this Contributor allows you to collect the values by configuring the paths in the paths list. 
+ * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it) + */ +public class SimpleMultiplePathContributor extends SimpleXpathMetadatumContributor { + + private final static Logger log = LogManager.getLogger(); + + private List paths; + + public SimpleMultiplePathContributor() {} + + public SimpleMultiplePathContributor(List paths) { + this.paths = paths; + } + + @Override + public Collection contributeMetadata(Element t) { + List values = new LinkedList<>(); + for (String path : this.paths) { + List namespaces = new ArrayList(); + for (String ns : prefixToNamespaceMapping.keySet()) { + namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns)); + } + XPathExpression xpath = XPathFactory.instance().compile(path, Filters.fpassthrough(), null, + namespaces); + List nodes = xpath.evaluate(t); + for (Object el : nodes) { + if (el instanceof Element) { + values.add(metadataFieldMapping.toDCValue(field, ((Element) el).getText())); + } else { + log.warn("node of type: " + el.getClass()); + } + } + } + return values; + } + + public List getPaths() { + return paths; + } + + public void setPaths(List paths) { + this.paths = paths; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleRisToMetadataConcatContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleRisToMetadataConcatContributor.java new file mode 100644 index 0000000000..5dd354c6f1 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleRisToMetadataConcatContributor.java @@ -0,0 +1,59 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import 
java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; + +/** + * This contributor extends SimpleRisToMetadataContributor, + * in particular, this one is able to chain multi values into a single one + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it) + */ +public class SimpleRisToMetadataConcatContributor extends SimpleRisToMetadataContributor { + + private String tag; + + private MetadataFieldConfig metadata; + + @Override + public Collection contributeMetadata(Map> record) { + List values = new LinkedList<>(); + List fieldValues = record.get(this.tag); + Optional.ofNullable(fieldValues) + .map(fv -> fv.stream()) + .map(s -> s.collect(Collectors.joining(" "))) + .ifPresent(t -> values.add(this.metadataFieldMapping.toDCValue(this.metadata, t))); + return values; + } + + public String getTag() { + return tag; + } + + public void setTag(String tag) { + this.tag = tag; + } + + public MetadataFieldConfig getMetadata() { + return metadata; + } + + public void setMetadata(MetadataFieldConfig metadata) { + this.metadata = metadata; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleRisToMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleRisToMetadataContributor.java new file mode 100644 index 0000000000..36ea0dd478 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleRisToMetadataContributor.java @@ -0,0 +1,71 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * 
http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; + +/** + * Metadata contributor that takes a record defined as Map> + * and turns it into metadatums configured in fieldToMetadata + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it) + */ +public class SimpleRisToMetadataContributor implements MetadataContributor>> { + + protected Map fieldToMetadata; + + protected MetadataFieldMapping>, + MetadataContributor>>> metadataFieldMapping; + + public SimpleRisToMetadataContributor() {} + + public SimpleRisToMetadataContributor(Map fieldToMetadata) { + this.fieldToMetadata = fieldToMetadata; + } + + @Override + public Collection contributeMetadata(Map> record) { + List values = new LinkedList<>(); + for (String field : fieldToMetadata.keySet()) { + List fieldValues = record.get(field); + if (Objects.nonNull(fieldValues)) { + for (String value : fieldValues) { + values.add(metadataFieldMapping.toDCValue(fieldToMetadata.get(field), value)); + } + } + } + return values; + } + + public Map getFieldToMetadata() { + return fieldToMetadata; + } + + public void setFieldToMetadata(Map fieldToMetadata) { + this.fieldToMetadata = fieldToMetadata; + } + + public MetadataFieldMapping>, + MetadataContributor>>> getMetadataFieldMapping() { + return metadataFieldMapping; + } + + public void setMetadataFieldMapping(MetadataFieldMapping>, + MetadataContributor>>> metadataFieldMapping) { + this.metadataFieldMapping = metadataFieldMapping; + } + +} \ No newline at end of file diff --git 
a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathDateFormatMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathDateFormatMetadataContributor.java new file mode 100644 index 0000000000..fb15cd60ab --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathDateFormatMetadataContributor.java @@ -0,0 +1,91 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.text.DateFormat; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; + +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jdom2.Attribute; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.Text; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; + +/** + * This contributor can be used when parsing an XML file, + * particularly to extract a date and convert it to a specific format. + * In the variable dateFormatFrom the read format should be configured, + * instead in the variable dateFormatTo the format you want to obtain. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class SimpleXpathDateFormatMetadataContributor extends SimpleXpathMetadatumContributor { + + private DateFormat dateFormatFrom; + private DateFormat dateFormatTo; + + public void setDateFormatFrom(String dateFormatFrom) { + this.dateFormatFrom = new SimpleDateFormat(dateFormatFrom); + } + + public void setDateFormatTo(String dateFormatTo) { + this.dateFormatTo = new SimpleDateFormat(dateFormatTo); + } + + @Override + public Collection contributeMetadata(Element element) { + List values = new LinkedList<>(); + List namespaces = new ArrayList(); + for (String ns : prefixToNamespaceMapping.keySet()) { + namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns)); + } + XPathExpression xpath = XPathFactory.instance() + .compile(query,Filters.fpassthrough(), null, namespaces); + List nodes = xpath.evaluate(element); + for (Object el : nodes) { + if (el instanceof Element) { + values.add(getMetadatum(field, ((Element) el).getText())); + } else if (el instanceof Attribute) { + values.add(getMetadatum(field, ((Attribute) el).getValue())); + } else if (el instanceof String) { + values.add(getMetadatum(field, (String) el)); + } else if (el instanceof Text) { + values.add(metadataFieldMapping.toDCValue(field, ((Text) el).getText())); + } else { + System.err.println("node of type: " + el.getClass()); + } + } + return values; + } + + private MetadatumDTO getMetadatum(MetadataFieldConfig field, String value) { + MetadatumDTO dcValue = new MetadatumDTO(); + if (field == null) { + return null; + } + try { + dcValue.setValue(dateFormatTo.format(dateFormatFrom.parse(value))); + } catch (ParseException e) { + dcValue.setValue(value); + } + dcValue.setElement(field.getElement()); + dcValue.setQualifier(field.getQualifier()); + dcValue.setSchema(field.getSchema()); + return dcValue; + } + +} \ No newline at end of file diff --git 
a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumAndAttributeContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumAndAttributeContributor.java new file mode 100644 index 0000000000..edaad8a249 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumAndAttributeContributor.java @@ -0,0 +1,69 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; + +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; + +/** + * This contributor checks for each node returned for the supplied path + * if node contains supplied attribute - the value of the current node is taken if exist. 
+ * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot com) + */ +public class SimpleXpathMetadatumAndAttributeContributor extends SimpleXpathMetadatumContributor { + + private final static Logger log = LogManager.getLogger(); + + private String attribute; + + @Override + public Collection contributeMetadata(Element t) { + List values = new LinkedList<>(); + List namespaces = new ArrayList(); + for (String ns : prefixToNamespaceMapping.keySet()) { + namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns)); + } + XPathExpression xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null, + namespaces); + List nodes = xpath.evaluate(t); + for (Object el : nodes) { + if (el instanceof Element) { + Element element = (Element) el; + String attributeValue = element.getAttributeValue(this.attribute); + if (StringUtils.isNotBlank(attributeValue)) { + values.add(metadataFieldMapping.toDCValue(this.field, attributeValue)); + } + } else { + log.warn("node of type: " + el.getClass()); + } + } + return values; + } + + public String getAttribute() { + return attribute; + } + + public void setAttribute(String attribute) { + this.attribute = attribute; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumContributor.java index 87cdbfa6ed..05f8647d78 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumContributor.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumContributor.java @@ -7,33 +7,36 @@ */ package org.dspace.importer.external.metadatamapping.contributor; +import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; import java.util.List; import java.util.Map; 
import javax.annotation.Resource; -import org.apache.axiom.om.OMAttribute; -import org.apache.axiom.om.OMElement; -import org.apache.axiom.om.OMText; -import org.apache.axiom.om.xpath.AXIOMXPath; +import org.apache.logging.log4j.Logger; import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; import org.dspace.importer.external.metadatamapping.MetadatumDTO; -import org.jaxen.JaxenException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.jdom2.Attribute; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.Text; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; import org.springframework.beans.factory.annotation.Autowired; /** - * Metadata contributor that takes an axiom OMElement and turns it into a metadatum + * Metadata contributor that takes a JDOM Element and turns it into a metadatum * * @author Roeland Dillen (roeland at atmire dot com) */ -public class SimpleXpathMetadatumContributor implements MetadataContributor { - private MetadataFieldConfig field; +public class SimpleXpathMetadatumContributor implements MetadataContributor { - private static final Logger log = LoggerFactory.getLogger(SimpleXpathMetadatumContributor.class); + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(); + + protected MetadataFieldConfig field; /** * Return prefixToNamespaceMapping @@ -44,14 +47,14 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor> metadataFieldMapping; + protected MetadataFieldMapping> metadataFieldMapping; /** * Return metadataFieldMapping * * @return MetadataFieldMapping */ - public MetadataFieldMapping> getMetadataFieldMapping() { + public MetadataFieldMapping> getMetadataFieldMapping() { return metadataFieldMapping; } @@ -62,7 +65,7 @@ public class SimpleXpathMetadatumContributor implements 
MetadataContributor> metadataFieldMapping) { + MetadataFieldMapping> metadataFieldMapping) { this.metadataFieldMapping = metadataFieldMapping; } @@ -76,7 +79,7 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor prefixToNamespaceMapping; + protected Map prefixToNamespaceMapping; /** * Initialize SimpleXpathMetadatumContributor with a query, prefixToNamespaceMapping and MetadataFieldConfig @@ -100,7 +103,7 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor contributeMetadata(OMElement t) { + public Collection contributeMetadata(Element t) { List values = new LinkedList<>(); - try { - AXIOMXPath xpath = new AXIOMXPath(query); - for (String ns : prefixToNamespaceMapping.keySet()) { - xpath.addNamespace(prefixToNamespaceMapping.get(ns), ns); - } - List nodes = xpath.selectNodes(t); - for (Object el : nodes) { - if (el instanceof OMElement) { - values.add(metadataFieldMapping.toDCValue(field, ((OMElement) el).getText())); - } else if (el instanceof OMAttribute) { - values.add(metadataFieldMapping.toDCValue(field, ((OMAttribute) el).getAttributeValue())); - } else if (el instanceof String) { - values.add(metadataFieldMapping.toDCValue(field, (String) el)); - } else if (el instanceof OMText) { - values.add(metadataFieldMapping.toDCValue(field, ((OMText) el).getText())); - } else { - log.error("node of type: " + el.getClass()); - } - } - return values; - } catch (JaxenException e) { - log.error(query, e); - throw new RuntimeException(e); - } + List namespaces = new ArrayList<>(); + for (String ns : prefixToNamespaceMapping.keySet()) { + namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns)); + } + XPathExpression xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null,namespaces); + List nodes = xpath.evaluate(t); + for (Object el : nodes) { + if (el instanceof Element) { + values.add(metadataFieldMapping.toDCValue(field, ((Element) el).getText())); + } else if (el instanceof 
Attribute) { + values.add(metadataFieldMapping.toDCValue(field, ((Attribute) el).getValue())); + } else if (el instanceof String) { + values.add(metadataFieldMapping.toDCValue(field, (String) el)); + } else if (el instanceof Text) { + values.add(metadataFieldMapping.toDCValue(field, ((Text) el).getText())); + } else { + log.error("Encountered unsupported XML node of type: {}. Skipped that node.", el.getClass()); + } + } + return values; } -} + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SplitMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SplitMetadataContributor.java new file mode 100644 index 0000000000..c04081957f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SplitMetadataContributor.java @@ -0,0 +1,65 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; + +import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; + +/** + * Wrapper class used to split another MetadataContributor's output into distinct values. + * The split is performed by matching a regular expression against the wrapped MetadataContributor's output. 
+ * + * @author Philipp Rumpf (philipp.rumpf@uni-bamberg.de) + */ + +public class SplitMetadataContributor implements MetadataContributor { + private final MetadataContributor innerContributor; + private final String regex; + + /** + * @param innerContributor The MetadataContributor whose output is split + * @param regex A regular expression matching the separator between different values + */ + public SplitMetadataContributor(MetadataContributor innerContributor, String regex) { + this.innerContributor = innerContributor; + this.regex = regex; + } + + @Override + public void setMetadataFieldMapping(MetadataFieldMapping> rt) { + + } + + /** + * Each metadatum returned by the wrapped MetadataContributor is split into one or more metadata values + * based on the provided regular expression. + * + * @param t The recordType object to retrieve metadata from + * @return The collection of processed metadata values + */ + @Override + public Collection contributeMetadata(T t) { + Collection metadata = innerContributor.contributeMetadata(t); + ArrayList splitMetadata = new ArrayList<>(); + for (MetadatumDTO metadatumDTO : metadata) { + String[] split = metadatumDTO.getValue().split(regex); + for (String splitItem : split) { + MetadatumDTO splitMetadatumDTO = new MetadatumDTO(); + splitMetadatumDTO.setSchema(metadatumDTO.getSchema()); + splitMetadatumDTO.setElement(metadatumDTO.getElement()); + splitMetadatumDTO.setQualifier(metadatumDTO.getQualifier()); + splitMetadatumDTO.setValue(splitItem); + splitMetadata.add(splitMetadatumDTO); + } + } + return splitMetadata; + } +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/WosAttribute2ValueContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/WosAttribute2ValueContributor.java new file mode 100644 index 0000000000..66e16f7ae8 --- /dev/null +++ 
b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/WosAttribute2ValueContributor.java @@ -0,0 +1,160 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import javax.annotation.Resource; + +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; + +/** + * Web Of Science specific implementation of {@link MetadataContributor} + * This contributor checks for each node returned for the given path if the node contains "this.attribute" + * and then checks if the attribute value is one of the values configured + * in the "this.attributeValue2metadata" map, if the value of the current known is taken. + * If "this.firstChild" is true, it takes the value of the child of the known. + * The mapping and configuration of this class can be found in the following wos-integration.xml file. 
+ * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it) + */ +public class WosAttribute2ValueContributor implements MetadataContributor { + + private final static Logger log = LogManager.getLogger(); + + private String query; + + private String attribute; + + private boolean firstChild; + + private String childName; + + private Map prefixToNamespaceMapping; + + private Map attributeValue2metadata; + + private MetadataFieldMapping> metadataFieldMapping; + + public WosAttribute2ValueContributor() {} + + public WosAttribute2ValueContributor(String query, + Map prefixToNamespaceMapping, + Map attributeValue2metadata) { + this.query = query; + this.prefixToNamespaceMapping = prefixToNamespaceMapping; + this.attributeValue2metadata = attributeValue2metadata; + } + + @Override + public Collection contributeMetadata(Element t) { + List values = new LinkedList<>(); + List namespaces = new ArrayList(); + for (String ns : prefixToNamespaceMapping.keySet()) { + namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns)); + } + XPathExpression xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null, + namespaces); + List nodes = xpath.evaluate(t); + for (Object el : nodes) { + if (el instanceof Element) { + Element element = (Element) el; + String attributeValue = element.getAttributeValue(this.attribute); + setField(attributeValue, element, values); + } else { + log.warn("node of type: " + el.getClass()); + } + } + return values; + } + + private void setField(String attributeValue, Element el, List values) { + for (String id : attributeValue2metadata.keySet()) { + if (StringUtils.equals(id, attributeValue)) { + if (this.firstChild) { + String value = el.getChild(this.childName).getValue(); + values.add(metadataFieldMapping.toDCValue(attributeValue2metadata.get(id), value)); + } else { + values.add(metadataFieldMapping.toDCValue(attributeValue2metadata.get(id), el.getText())); + } + } + } + } + + public MetadataFieldMapping> 
getMetadataFieldMapping() { + return metadataFieldMapping; + } + + public void setMetadataFieldMapping( + MetadataFieldMapping> metadataFieldMapping) { + this.metadataFieldMapping = metadataFieldMapping; + } + + @Resource(name = "isiFullprefixMapping") + public void setPrefixToNamespaceMapping(Map prefixToNamespaceMapping) { + this.prefixToNamespaceMapping = prefixToNamespaceMapping; + } + + public Map getPrefixToNamespaceMapping() { + return prefixToNamespaceMapping; + } + + public String getAttribute() { + return attribute; + } + + public void setAttribute(String attribute) { + this.attribute = attribute; + } + + public Map getAttributeValue2metadata() { + return attributeValue2metadata; + } + + public void setAttributeValue2metadata(Map attributeValue2metadata) { + this.attributeValue2metadata = attributeValue2metadata; + } + + public String getQuery() { + return query; + } + + public void setQuery(String query) { + this.query = query; + } + + public boolean isFirstChild() { + return firstChild; + } + + public void setFirstChild(boolean firstChild) { + this.firstChild = firstChild; + } + + public String getChildName() { + return childName; + } + + public void setChildName(String childName) { + this.childName = childName; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/WosIdentifierContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/WosIdentifierContributor.java new file mode 100644 index 0000000000..cf434c326e --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/WosIdentifierContributor.java @@ -0,0 +1,71 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package 
org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; + +/** + * This contributor can retrieve the identifiers + * configured in "this.identifire2field" from the Web of Science response. + * The mapping and configuration of this class can be found in the following wos-integration.xml file. + * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it) + */ +public class WosIdentifierContributor extends SimpleXpathMetadatumContributor { + + protected Map identifier2field; + + @Override + public Collection contributeMetadata(Element element) { + List values = new LinkedList<>(); + List namespaces = new ArrayList<>(); + for (String ns : prefixToNamespaceMapping.keySet()) { + namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns)); + } + XPathExpression xpath = + XPathFactory.instance().compile(query, Filters.element(), null, namespaces); + + List nodes = xpath.evaluate(element); + for (Element el : nodes) { + String type = el.getAttributeValue("type"); + setIdentyfier(type, el, values); + } + return values; + } + + private void setIdentyfier(String type, Element el, List values) { + for (String id : identifier2field.keySet()) { + if (StringUtils.equals(id, type)) { + String value = el.getAttributeValue("value"); + values.add(metadataFieldMapping.toDCValue(identifier2field.get(id), value)); + } + } + } + + public Map getIdentifier2field() { + return identifier2field; + } + + public void setIdentifier2field(Map identifier2field) { + 
this.identifier2field = identifier2field; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/WosIdentifierRidContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/WosIdentifierRidContributor.java new file mode 100644 index 0000000000..768ef50e65 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/WosIdentifierRidContributor.java @@ -0,0 +1,68 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Objects; + +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; + +/** + * Web Of Science specific implementation of {@link MetadataContributor} + * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it) + */ +public class WosIdentifierRidContributor extends SimpleXpathMetadatumContributor { + + private final static Logger log = LogManager.getLogger(); + + @Override + public Collection contributeMetadata(Element t) { + List values = new LinkedList<>(); + List namespaces = new ArrayList(); + for (String ns : prefixToNamespaceMapping.keySet()) { + namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns)); + } + XPathExpression xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), 
null, + namespaces); + List nodes = xpath.evaluate(t); + for (Object el : nodes) { + if (el instanceof Element) { + Element element = ((Element) el).getChild("name"); + if (Objects.nonNull(element)) { + String type = element.getAttributeValue("role"); + setIdentyfier(type, element, values); + } + } else { + log.warn("node of type: " + el.getClass()); + } + } + return values; + } + + private void setIdentyfier(String type, Element el, List values) { + if (StringUtils.equals("researcher_id", type)) { + String value = el.getAttributeValue("r_id"); + if (StringUtils.isNotBlank(value)) { + values.add(metadataFieldMapping.toDCValue(this.field, value)); + } + } + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java index 4802dcfa17..b30ea22ca4 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java @@ -14,31 +14,34 @@ import java.io.InputStreamReader; import java.io.Reader; import java.io.StringReader; import java.util.Collection; +import java.util.HashMap; import java.util.LinkedList; import java.util.List; +import java.util.Map; +import java.util.Objects; import java.util.concurrent.Callable; -import javax.ws.rs.client.Client; -import javax.ws.rs.client.ClientBuilder; -import javax.ws.rs.client.Invocation; -import javax.ws.rs.client.WebTarget; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; import com.google.common.io.CharStreams; -import org.apache.axiom.om.OMElement; -import org.apache.axiom.om.OMXMLBuilderFactory; -import org.apache.axiom.om.OMXMLParserWrapper; -import org.apache.axiom.om.xpath.AXIOMXPath; +import 
org.apache.commons.lang3.StringUtils; +import org.apache.http.client.utils.URIBuilder; import org.dspace.content.Item; import org.dspace.importer.external.datamodel.ImportRecord; import org.dspace.importer.external.datamodel.Query; import org.dspace.importer.external.exception.FileMultipleOccurencesException; import org.dspace.importer.external.exception.FileSourceException; import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; import org.dspace.importer.external.service.AbstractImportMetadataSourceService; import org.dspace.importer.external.service.components.FileSource; import org.dspace.importer.external.service.components.QuerySource; -import org.jaxen.JaxenException; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.filter.Filters; +import org.jdom2.input.SAXBuilder; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; +import org.springframework.beans.factory.annotation.Autowired; /** * Implements a data source for querying PubMed Central @@ -46,20 +49,23 @@ import org.jaxen.JaxenException; * @author Roeland Dillen (roeland at atmire dot com) * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) */ -public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService +public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService implements QuerySource, FileSource { - private String baseAddress; + private String urlFetch; + private String urlSearch; - // it is protected so that subclass can mock it for testing - protected WebTarget pubmedWebTarget; + private int attempt = 3; private List supportedExtensions; + @Autowired + private LiveImportClient liveImportClient; + /** * Set the file extensions supported by this metadata service * - * @param supportedExtensionsthe file extensions (xml,txt,...) 
supported by this service + * @param supportedExtensions the file extensions (xml,txt,...) supported by this service */ public void setSupportedExtensions(List supportedExtensions) { this.supportedExtensions = supportedExtensions; @@ -185,29 +191,7 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat * @throws Exception on generic exception */ @Override - public void init() throws Exception { - Client client = ClientBuilder.newClient(); - WebTarget webTarget = client.target(baseAddress); - pubmedWebTarget = webTarget.queryParam("db", "pubmed"); - } - - /** - * Return the baseAddress set to this object - * - * @return The String object that represents the baseAddress of this object - */ - public String getBaseAddress() { - return baseAddress; - } - - /** - * Set the baseAddress to this object - * - * @param baseAddress The String object that represents the baseAddress of this object - */ - public void setBaseAddress(String baseAddress) { - this.baseAddress = baseAddress; - } + public void init() throws Exception {} private class GetNbRecords implements Callable { @@ -224,36 +208,43 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat @Override public Integer call() throws Exception { - WebTarget getRecordIdsTarget = pubmedWebTarget - .queryParam("term", query.getParameterAsClass("query", String.class)); + URIBuilder uriBuilder = new URIBuilder(urlSearch); + uriBuilder.addParameter("db", "pubmed"); + uriBuilder.addParameter("term", query.getParameterAsClass("query", String.class)); + Map> params = new HashMap>(); + String response = StringUtils.EMPTY; + int countAttempt = 0; + while (StringUtils.isBlank(response) && countAttempt <= attempt) { + countAttempt++; + response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + } - getRecordIdsTarget = getRecordIdsTarget.path("esearch.fcgi"); + if (StringUtils.isBlank(response)) { + throw new RuntimeException("After " + attempt + + " 
attempts to contact the PubMed service, a correct answer could not be received." + + " The request was made with this URL:" + uriBuilder.toString()); + } - Invocation.Builder invocationBuilder = getRecordIdsTarget.request(MediaType.TEXT_PLAIN_TYPE); - - Response response = invocationBuilder.get(); - - String responseString = response.readEntity(String.class); - - String count = getSingleElementValue(responseString, "Count"); - - return Integer.parseInt(count); + return Integer.parseInt(getSingleElementValue(response, "Count")); } } - private String getSingleElementValue(String src, String elementName) { - OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(src)); - OMElement element = records.getDocumentElement(); - AXIOMXPath xpath = null; String value = null; + try { - xpath = new AXIOMXPath("//" + elementName); - List recordsList = xpath.selectNodes(element); - if (!recordsList.isEmpty()) { - value = recordsList.get(0).getText(); + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(src)); + Element root = document.getRootElement(); + + XPathExpression xpath = + XPathFactory.instance().compile("//" + elementName, Filters.element()); + + Element record = xpath.evaluateFirst(root); + if (record != null) { + value = record.getText(); } - } catch (JaxenException e) { + } catch (JDOMException | IOException e) { value = null; } return value; @@ -280,43 +271,63 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat Integer start = query.getParameterAsClass("start", Integer.class); Integer count = query.getParameterAsClass("count", Integer.class); - if (count == null || count < 0) { + if (Objects.isNull(count) || count < 0) { count = 10; } - if (start == null || start < 0) { + if (Objects.isNull(start) || start < 0) { start = 0; } List records = new LinkedList(); - WebTarget getRecordIdsTarget = pubmedWebTarget.queryParam("term", queryString); - getRecordIdsTarget = 
getRecordIdsTarget.queryParam("retstart", start); - getRecordIdsTarget = getRecordIdsTarget.queryParam("retmax", count); - getRecordIdsTarget = getRecordIdsTarget.queryParam("usehistory", "y"); - getRecordIdsTarget = getRecordIdsTarget.path("esearch.fcgi"); + URIBuilder uriBuilder = new URIBuilder(urlSearch); + uriBuilder.addParameter("db", "pubmed"); + uriBuilder.addParameter("retstart", start.toString()); + uriBuilder.addParameter("retmax", count.toString()); + uriBuilder.addParameter("usehistory", "y"); + uriBuilder.addParameter("term", queryString); + Map> params = new HashMap>(); + String response = StringUtils.EMPTY; + int countAttempt = 0; + while (StringUtils.isBlank(response) && countAttempt <= attempt) { + countAttempt++; + response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + } - Invocation.Builder invocationBuilder = getRecordIdsTarget.request(MediaType.TEXT_PLAIN_TYPE); + if (StringUtils.isBlank(response)) { + throw new RuntimeException("After " + attempt + + " attempts to contact the PubMed service, a correct answer could not be received." 
+ + " The request was made with this URL:" + uriBuilder.toString()); + } - Response response = invocationBuilder.get(); - String responseString = response.readEntity(String.class); + String queryKey = getSingleElementValue(response, "QueryKey"); + String webEnv = getSingleElementValue(response, "WebEnv"); - String queryKey = getSingleElementValue(responseString, "QueryKey"); - String webEnv = getSingleElementValue(responseString, "WebEnv"); + URIBuilder uriBuilder2 = new URIBuilder(urlFetch); + uriBuilder2.addParameter("db", "pubmed"); + uriBuilder2.addParameter("retstart", start.toString()); + uriBuilder2.addParameter("retmax", count.toString()); + uriBuilder2.addParameter("WebEnv", webEnv); + uriBuilder2.addParameter("query_key", queryKey); + uriBuilder2.addParameter("retmode", "xml"); + Map> params2 = new HashMap>(); + String response2 = StringUtils.EMPTY; + countAttempt = 0; + while (StringUtils.isBlank(response2) && countAttempt <= attempt) { + countAttempt++; + response2 = liveImportClient.executeHttpGetRequest(1000, uriBuilder2.toString(), params2); + } - WebTarget getRecordsTarget = pubmedWebTarget.queryParam("WebEnv", webEnv); - getRecordsTarget = getRecordsTarget.queryParam("query_key", queryKey); - getRecordsTarget = getRecordsTarget.queryParam("retmode", "xml"); - getRecordsTarget = getRecordsTarget.path("efetch.fcgi"); - getRecordsTarget = getRecordsTarget.queryParam("retmax", count); - getRecordsTarget = getRecordsTarget.queryParam("retstart", start); + if (StringUtils.isBlank(response2)) { + throw new RuntimeException("After " + attempt + + " attempts to contact the PubMed service, a correct answer could not be received." 
+ + " The request was made with this URL:" + uriBuilder2.toString()); + } - invocationBuilder = getRecordsTarget.request(MediaType.TEXT_PLAIN_TYPE); - response = invocationBuilder.get(); + List elements = splitToRecords(response2); - List omElements = splitToRecords(response.readEntity(String.class)); - - for (OMElement record : omElements) { + for (Element record : elements) { records.add(transformSourceRecords(record)); } @@ -324,15 +335,18 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat } } - private List splitToRecords(String recordsSrc) { - OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(recordsSrc)); - OMElement element = records.getDocumentElement(); - AXIOMXPath xpath = null; + private List splitToRecords(String recordsSrc) { try { - xpath = new AXIOMXPath("//PubmedArticle"); - List recordsList = xpath.selectNodes(element); + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(recordsSrc)); + Element root = document.getRootElement(); + + XPathExpression xpath = + XPathFactory.instance().compile("//PubmedArticle", Filters.element()); + + List recordsList = xpath.evaluate(root); return recordsList; - } catch (JaxenException e) { + } catch (JDOMException | IOException e) { return null; } } @@ -352,23 +366,29 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat @Override public ImportRecord call() throws Exception { - String id = query.getParameterAsClass("id", String.class); - WebTarget getRecordTarget = pubmedWebTarget.queryParam("id", id); - getRecordTarget = getRecordTarget.queryParam("retmode", "xml"); - getRecordTarget = getRecordTarget.path("efetch.fcgi"); + URIBuilder uriBuilder = new URIBuilder(urlFetch); + uriBuilder.addParameter("db", "pubmed"); + uriBuilder.addParameter("retmode", "xml"); + uriBuilder.addParameter("id", query.getParameterAsClass("id", String.class)); - Invocation.Builder invocationBuilder 
= getRecordTarget.request(MediaType.TEXT_PLAIN_TYPE); - - Response response = invocationBuilder.get(); - - List omElements = splitToRecords(response.readEntity(String.class)); - - if (omElements.size() == 0) { - return null; + Map> params = new HashMap>(); + String response = StringUtils.EMPTY; + int countAttempt = 0; + while (StringUtils.isBlank(response) && countAttempt <= attempt) { + countAttempt++; + response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); } - return transformSourceRecords(omElements.get(0)); + if (StringUtils.isBlank(response)) { + throw new RuntimeException("After " + attempt + + " attempts to contact the PubMed service, a correct answer could not be received." + + " The request was made with this URL:" + uriBuilder.toString()); + } + + List elements = splitToRecords(response); + + return elements.isEmpty() ? null : transformSourceRecords(elements.get(0)); } } @@ -387,40 +407,57 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat @Override public Collection call() throws Exception { - WebTarget getRecordIdsTarget = pubmedWebTarget - .queryParam("term", query.getParameterAsClass("term", String.class)); - getRecordIdsTarget = getRecordIdsTarget - .queryParam("field", query.getParameterAsClass("field", String.class)); - getRecordIdsTarget = getRecordIdsTarget.queryParam("usehistory", "y"); - getRecordIdsTarget = getRecordIdsTarget.path("esearch.fcgi"); + URIBuilder uriBuilder = new URIBuilder(urlSearch); + uriBuilder.addParameter("db", "pubmed"); + uriBuilder.addParameter("usehistory", "y"); + uriBuilder.addParameter("term", query.getParameterAsClass("term", String.class)); + uriBuilder.addParameter("field", query.getParameterAsClass("field", String.class)); - Invocation.Builder invocationBuilder = getRecordIdsTarget.request(MediaType.TEXT_PLAIN_TYPE); + Map> params = new HashMap>(); + String response = StringUtils.EMPTY; + int countAttempt = 0; + while 
(StringUtils.isBlank(response) && countAttempt <= attempt) { + countAttempt++; + response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + } - Response response = invocationBuilder.get(); - String responseString = response.readEntity(String.class); + if (StringUtils.isBlank(response)) { + throw new RuntimeException("After " + attempt + + " attempts to contact the PubMed service, a correct answer could not be received." + + " The request was made with this URL:" + uriBuilder.toString()); + } - String queryKey = getSingleElementValue(responseString, "QueryKey"); - String webEnv = getSingleElementValue(responseString, "WebEnv"); + String webEnv = getSingleElementValue(response, "WebEnv"); + String queryKey = getSingleElementValue(response, "QueryKey"); - WebTarget getRecordsTarget = pubmedWebTarget.queryParam("WebEnv", webEnv); - getRecordsTarget = getRecordsTarget.queryParam("query_key", queryKey); - getRecordsTarget = getRecordsTarget.queryParam("retmode", "xml"); - getRecordsTarget = getRecordsTarget.path("efetch.fcgi"); + URIBuilder uriBuilder2 = new URIBuilder(urlFetch); + uriBuilder2.addParameter("db", "pubmed"); + uriBuilder2.addParameter("retmode", "xml"); + uriBuilder2.addParameter("WebEnv", webEnv); + uriBuilder2.addParameter("query_key", queryKey); - invocationBuilder = getRecordsTarget.request(MediaType.TEXT_PLAIN_TYPE); - response = invocationBuilder.get(); + Map> params2 = new HashMap>(); + String response2 = StringUtils.EMPTY; + countAttempt = 0; + while (StringUtils.isBlank(response2) && countAttempt <= attempt) { + countAttempt++; + response2 = liveImportClient.executeHttpGetRequest(1000, uriBuilder2.toString(), params2); + } - String xml = response.readEntity(String.class); - return parseXMLString(xml); + if (StringUtils.isBlank(response2)) { + throw new RuntimeException("After " + attempt + + " attempts to contact the PubMed service, a correct answer could not be received." 
+ + " The request was made with this URL:" + uriBuilder2.toString()); + } + + return parseXMLString(response2); } } - @Override public List getRecords(InputStream inputStream) throws FileSourceException { - String xml = null; try (Reader reader = new InputStreamReader(inputStream, "UTF-8")) { - xml = CharStreams.toString(reader); + String xml = CharStreams.toString(reader); return parseXMLString(xml); } catch (IOException e) { throw new FileSourceException ("Cannot read XML from InputStream", e); @@ -441,10 +478,27 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat private List parseXMLString(String xml) { List records = new LinkedList(); - List omElements = splitToRecords(xml); - for (OMElement record : omElements) { + List elements = splitToRecords(xml); + for (Element record : elements) { records.add(transformSourceRecords(record)); } return records; } -} + + public String getUrlFetch() { + return urlFetch; + } + + public void setUrlFetch(String urlFetch) { + this.urlFetch = urlFetch; + } + + public String getUrlSearch() { + return urlSearch; + } + + public void setUrlSearch(String urlSearch) { + this.urlSearch = urlSearch; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeFieldMapping.java new file mode 100644 index 0000000000..8c8e23fe98 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeFieldMapping.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.pubmedeurope; + +import java.util.Map; +import javax.annotation.Resource; + +import 
org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the PubmedEurope metadatum fields on the DSpace metadatum fields + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +public class PubmedEuropeFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. + */ + @Override + @Resource(name = "pubmedEuropeMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeMetadataSourceServiceImpl.java new file mode 100644 index 0000000000..1ec0da7420 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeMetadataSourceServiceImpl.java @@ -0,0 +1,419 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.pubmedeurope; + +import java.io.IOException; +import java.io.StringReader; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.Callable; +import 
javax.el.MethodNotFoundException; + +import org.apache.commons.lang3.StringUtils; +import org.apache.http.HttpException; +import org.apache.http.client.ClientProtocolException; +import org.apache.http.client.utils.URIBuilder; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.components.QuerySource; +import org.jaxen.JaxenException; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.filter.Filters; +import org.jdom2.input.SAXBuilder; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.util.CollectionUtils; + +/** + * Implements a data source for querying PubMed Europe + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class PubmedEuropeMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private final static Logger log = LogManager.getLogger(); + + private String url; + + @Autowired + private LiveImportClient liveImportClient; + + @Override + public String getImportSource() { + return "pubmedeu"; + } + + /** + * Get a single record from the PubMed Europe. + * + * @param id Identifier for the record + * @return The first matching record + * @throws MetadataSourceException If the underlying methods throw any exception. 
+ */ + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + List records = retry(new SearchByIdCallable(id)); + return CollectionUtils.isEmpty(records) ? null : records.get(0); + } + + /** + * Find the number of records matching a query; + * + * @param query a query string to base the search on. + * @return the sum of the matching records over this import source + * @throws MetadataSourceException if the underlying methods throw any exception. + */ + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + /** + * Find the number of records matching a query; + * + * @param query A query string to base the search on. + * @return The sum of the matching records over this import source + * @throws MetadataSourceException If the underlying methods throw any exception. + */ + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + /** + * Find records matching a string query. + * + * @param query A query string to base the search on. + * @param start Offset to start at + * @param count Number of records to retrieve. + * @return A set of records. Fully transformed. + * @throws MetadataSourceException If the underlying methods throw any exception. + */ + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query, count, start)); + } + + /** + * Find records based on a object query. + * + * @param query A query object to base the search on. + * @return A set of records. Fully transformed. + * @throws MetadataSourceException If the underlying methods throw any exception. + */ + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query)); + } + + /** + * Get a single record from the PubMed Europe. 
+ * + * @param query A query matching a single record + * @return The first matching record + * @throws MetadataSourceException If the underlying methods throw any exception. + */ + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + List records = retry(new SearchByIdCallable(query)); + return CollectionUtils.isEmpty(records) ? null : records.get(0); + } + + /** + * Finds records based on query object. + * + * @param query A query object to base the search on. + * @return A collection of import records. + * @throws MetadataSourceException If the underlying methods throw any exception. + */ + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + return retry(new FindMatchingRecordCallable(query)); + } + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for PubMed Europe"); + } + + @Override + public void init() throws Exception {} + + public List getByPubmedEuropeID(String pubmedID, Integer start, Integer size) + throws IOException, HttpException { + String query = "(EXT_ID:" + pubmedID + ")"; + return search(query, size < 1 ? 1 : size, start); + } + + /** + * This class is a Callable implementation to get PubMed Europe entries based on + * query object. + * + * This Callable use as query value the string queryString passed to constructor. + * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class SearchByQueryCallable implements Callable> { + + private Query query; + + private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("count", maxResult); + query.addParameter("start", start); + } + + private SearchByQueryCallable(Query query) { + this.query = query; + } + + @Override + public List call() throws Exception { + Integer count = query.getParameterAsClass("count", Integer.class); + Integer start = query.getParameterAsClass("start", Integer.class); + String queryString = query.getParameterAsClass("query", String.class); + return search(queryString, count, start); + + } + } + + /** + * This class is a Callable implementation to get an PubMed Europe entry using PubMed Europe ID + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class SearchByIdCallable implements Callable> { + private Query query; + + private SearchByIdCallable(Query query) { + this.query = query; + } + + private SearchByIdCallable(String id) { + this.query = new Query(); + query.addParameter("id", id); + } + + @Override + public List call() throws Exception { + return getByPubmedEuropeID(query.getParameterAsClass("id", String.class), 1 ,0); + } + } + + /** + * This class is a Callable implementation to search PubMed Europe entries + * using author, title and year. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + public class FindMatchingRecordCallable implements Callable> { + + private Query query; + + private FindMatchingRecordCallable(Query q) { + query = q; + } + + @Override + public List call() throws Exception { + String title = query.getParameterAsClass("title", String.class); + String author = query.getParameterAsClass("author", String.class); + Integer year = query.getParameterAsClass("year", Integer.class); + Integer maxResult = query.getParameterAsClass("maxResult", Integer.class); + Integer start = query.getParameterAsClass("start", Integer.class); + return search(title, author, year, maxResult, start); + } + + } + + /** + * This class is a Callable implementation to count the number + * of entries for a PubMed Europe query. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class CountByQueryCallable implements Callable { + private Query query; + + + private CountByQueryCallable(String queryString) { + query = new Query(); + query.addParameter("query", queryString); + } + + private CountByQueryCallable(Query query) { + this.query = query; + } + + @Override + public Integer call() throws Exception { + try { + return count(query.getParameterAsClass("query", String.class)); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + } + + /** + * Returns the total number of PubMed Europe publications returned by a specific query + * + * @param query A keyword or combination of keywords to be searched + * @throws URISyntaxException If URI syntax error + * @throws ClientProtocolException The client protocol exception + * @throws IOException If IO error + * @throws JaxenException If Xpath evaluation failed + */ + public Integer count(String query) throws URISyntaxException, ClientProtocolException, IOException, JaxenException { + try { + Map> params = new HashMap>(); + String response = liveImportClient.executeHttpGetRequest(1000, buildURI(1, query),
params); + + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(response)); + Element root = document.getRootElement(); + Element element = root.getChild("hitCount"); + return Integer.parseInt(element.getValue()); + } catch (JDOMException e) { + log.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); + } + } + + public List search(String title, String author, int year, int count, int start) + throws IOException { + StringBuffer query = new StringBuffer(); + query.append("("); + if (StringUtils.isNotBlank(title)) { + query.append("TITLE:").append(title); + query.append(")"); + } + if (StringUtils.isNotBlank(author)) { + // Search for a surname and (optionally) initial(s) in publication author lists + // AUTH:einstein, AUTH:”Smith AB” + String splitRegex = "(\\s*,\\s+|\\s*;\\s+|\\s*;+|\\s*,+|\\s+)"; + String[] authors = author.split(splitRegex); + if (query.length() > 0) { + query.append(" AND "); + } + query.append("("); + int countAuthors = 0; + for (String auth : authors) { + countAuthors++; + query.append("AUTH:\"").append(auth).append("\""); + if (countAuthors < authors.length) { + query.append(" AND "); + } + } + query.append(")"); + } + if (year != -1) { + if (query.length() > 0) { + query.append(" AND "); + } + query.append("( PUB_YEAR:").append(year).append(")"); + } + query.append(")"); + return search(query.toString(), count, start); + } + + /** + * Returns a list of PubMed Europe publication records + * + * @param query A keyword or combination of keywords to be searched + * @param size The number of search results per page + * @param start Start number for the acquired search result list + * @throws IOException If IO error + */ + public List search(String query, Integer size, Integer start) throws IOException { + List results = new ArrayList<>(); + try { + URIBuilder uriBuilder = new URIBuilder(this.url); + uriBuilder.addParameter("format", "xml"); + 
uriBuilder.addParameter("resulttype", "core"); + uriBuilder.addParameter("pageSize", String.valueOf(size)); + uriBuilder.addParameter("query", query); + Map> params = new HashMap>(); + boolean lastPage = false; + int skipped = 0; + while (!lastPage || results.size() < size) { + String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + String cursorMark = StringUtils.EMPTY; + if (StringUtils.isNotBlank(response)) { + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(response)); + XPathFactory xpfac = XPathFactory.instance(); + XPathExpression xPath = xpfac.compile("//responseWrapper/resultList/result", + Filters.element()); + List records = xPath.evaluate(document); + if (records.size() > 0) { + for (Element item : records) { + if (start > skipped) { + skipped++; + } else { + results.add(transformSourceRecords(item)); + } + } + } else { + lastPage = true; + break; + } + Element root = document.getRootElement(); + Element nextCursorMark = root.getChild("nextCursorMark"); + cursorMark = Objects.nonNull(nextCursorMark) ? 
nextCursorMark.getValue() : StringUtils.EMPTY; + } + if (StringUtils.isNotBlank(cursorMark)) { + uriBuilder.setParameter("cursorMark", cursorMark); + } else { + lastPage = true; + } + } + } catch (URISyntaxException | JDOMException e) { + log.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); + } + return results; + } + + private String buildURI(Integer pageSize, String query) throws URISyntaxException { + URIBuilder uriBuilder = new URIBuilder(this.url); + uriBuilder.addParameter("format", "xml"); + uriBuilder.addParameter("resulttype", "core"); + uriBuilder.addParameter("pageSize", String.valueOf(pageSize)); + uriBuilder.addParameter("query", query); + return uriBuilder.toString(); + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/ris/service/RisImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/ris/service/RisImportMetadataSourceServiceImpl.java index 2574e187df..1f460c19e6 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/ris/service/RisImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/ris/service/RisImportMetadataSourceServiceImpl.java @@ -126,10 +126,10 @@ public class RisImportMetadataSourceServiceImpl extends AbstractPlainMetadataSou } /** - * Retrieve the MetadataFieldMapping containing the mapping between RecordType + * Set the MetadataFieldMapping containing the mapping between RecordType * (in this case PlainMetadataSourceDto.class) and Metadata * - * @return The configured MetadataFieldMapping + * @param metadataFieldMap The configured MetadataFieldMapping */ @Override @SuppressWarnings("unchecked") diff --git a/dspace-api/src/main/java/org/dspace/importer/external/scielo/service/ScieloFieldMapping.java
b/dspace-api/src/main/java/org/dspace/importer/external/scielo/service/ScieloFieldMapping.java new file mode 100644 index 0000000000..0d7183a1f0 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/scielo/service/ScieloFieldMapping.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.scielo.service; +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the Scielo metadatum fields on the DSpace metadatum fields + * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science dot it) + */ +@SuppressWarnings("rawtypes") +public class ScieloFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and + * metadata that will be set to the item. 
+ */ + @Override + @SuppressWarnings("unchecked") + @Resource(name = "scieloMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/scielo/service/ScieloImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/scielo/service/ScieloImportMetadataSourceServiceImpl.java new file mode 100644 index 0000000000..4f83ffe978 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/scielo/service/ScieloImportMetadataSourceServiceImpl.java @@ -0,0 +1,263 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.scielo.service; + +import java.io.BufferedReader; +import java.io.StringReader; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.Callable; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import javax.el.MethodNotFoundException; +import javax.ws.rs.BadRequestException; + +import org.apache.commons.collections4.CollectionUtils; +import org.apache.http.client.utils.URIBuilder; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.FileSourceException; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; 
+import org.dspace.importer.external.service.components.QuerySource; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying Scielo + * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it) + */ +public class ScieloImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService>> + implements QuerySource { + + /** + * This pattern is used when reading the Scielo response, + * to check if the fields you are reading is in rid format + */ + private static final String PATTERN = "^([A-Z][A-Z0-9]) - (.*)$"; + + /** + * This pattern is used to verify correct format of ScieloId + */ + private static final String ID_PATTERN = "^(.....)-(.*)-(...)$"; + + private int timeout = 1000; + + private String url; + + @Autowired + private LiveImportClient liveImportClient; + + @Override + public void init() throws Exception {} + + @Override + public String getImportSource() { + return "scielo"; + } + + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query, count, start)); + } + + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query)); + } + + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + List records = retry(new SearchByQueryCallable(query)); + return CollectionUtils.isEmpty(records) ? null : records.get(0); + } + + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + List records = retry(new FindByIdCallable(id)); + return CollectionUtils.isEmpty(records) ? 
null : records.get(0); + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + return retry(new SearchNBByQueryCallable(query)); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for Scielo"); + } + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for Scielo"); + } + + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for Scielo"); + } + + /** + * This class is a Callable implementation to count the number of entries for a Scielo query + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchNBByQueryCallable implements Callable { + + private String query; + + private SearchNBByQueryCallable(String queryString) { + this.query = queryString; + } + + private SearchNBByQueryCallable(Query query) { + this.query = query.getParameterAsClass("query", String.class); + } + + @Override + public Integer call() throws Exception { + Map> params = new HashMap>(); + URIBuilder uriBuilder = new URIBuilder(url + URLEncoder.encode(query, StandardCharsets.UTF_8)); + String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); + Map>> records = getRecords(resp); + return Objects.nonNull(records) ? records.size() : 0; + } + } + + /** + * This class is a Callable implementation to get a Scielo entry using ScieloID + * The ScieloID to use can be passed through the constructor as a String + * or as Query's map entry, with the key "id".
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class FindByIdCallable implements Callable> { + + private String id; + + private FindByIdCallable(String id) { + this.id = id; + } + + @Override + public List call() throws Exception { + List results = new ArrayList<>(); + String scieloId = id.trim(); + Pattern risPattern = Pattern.compile(ID_PATTERN); + Matcher risMatcher = risPattern.matcher(scieloId); + if (risMatcher.matches()) { + Map> params = new HashMap>(); + URIBuilder uriBuilder = new URIBuilder(url + URLEncoder.encode(scieloId, StandardCharsets.UTF_8)); + String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); + Map>> records = getRecords(resp); + if (Objects.nonNull(records) && !records.isEmpty()) { + results.add(transformSourceRecords(records.get(1))); + } + } else { + throw new BadRequestException("id provided : " + scieloId + " is not an ScieloID"); + } + return results; + } + } + + /** + * This class is a Callable implementation to get Scielo entries based on query object. + * This Callable use as query value the string queryString passed to constructor. + * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count".
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByQueryCallable implements Callable> { + + private Query query; + + private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("start", start); + query.addParameter("count", maxResult); + } + + private SearchByQueryCallable(Query query) { + this.query = query; + } + + @Override + public List call() throws Exception { + List results = new ArrayList<>(); + String q = query.getParameterAsClass("query", String.class); + Integer count = query.getParameterAsClass("count", Integer.class); + Integer start = query.getParameterAsClass("start", Integer.class); + URIBuilder uriBuilder = new URIBuilder(url + URLEncoder.encode(q, StandardCharsets.UTF_8)); + uriBuilder.addParameter("start", start.toString()); + uriBuilder.addParameter("count", count.toString()); + Map> params = new HashMap>(); + String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); + Map>> records = getRecords(resp); + for (int record : records.keySet()) { + results.add(transformSourceRecords(records.get(record))); + } + return results; + } + } + + private Map>> getRecords(String resp) throws FileSourceException { + Map>> records = new HashMap>>(); + BufferedReader reader; + int countRecord = 0; + try { + reader = new BufferedReader(new StringReader(resp)); + String line; + while ((line = reader.readLine()) != null) { + if (line.isEmpty() || line.equals("") || line.matches("^\s*$")) { + continue; + } + line = line.replaceAll("\uFEFF", "").trim(); + Pattern risPattern = Pattern.compile(PATTERN); + Matcher risMatcher = risPattern.matcher(line); + if (risMatcher.matches()) { + if (risMatcher.group(1).equals("TY") && risMatcher.group(2).equals("JOUR")) { + countRecord ++; + Map> newMap = new HashMap>(); + records.put(countRecord, newMap); + } else { + Map> tag2values =
records.get(countRecord); + List values = tag2values.get(risMatcher.group(1)); + if (Objects.isNull(values)) { + List newValues = new ArrayList(); + newValues.add(risMatcher.group(2)); + tag2values.put(risMatcher.group(1), newValues); + } else { + values.add(risMatcher.group(2)); + tag2values.put(risMatcher.group(1), values); + } + } + } + } + } catch (Exception e) { + throw new FileSourceException("Cannot parse RIS file", e); + } + return records; + } + + public void setUrl(String url) { + this.url = url; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusFieldMapping.java new file mode 100644 index 0000000000..c8143339b4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusFieldMapping.java @@ -0,0 +1,38 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.scopus.service; + +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the Scopus metadatum fields on the DSpace metadatum fields + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +public class ScopusFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. 
+ * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. + */ + @Override + @Resource(name = "scopusMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusImportMetadataSourceServiceImpl.java new file mode 100644 index 0000000000..d0c2fb078a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusImportMetadataSourceServiceImpl.java @@ -0,0 +1,421 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.scopus.service; + +import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.URI_PARAMETERS; + +import java.io.IOException; +import java.io.StringReader; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.Callable; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import javax.el.MethodNotFoundException; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import 
org.dspace.importer.external.service.DoiCheck; +import org.dspace.importer.external.service.components.QuerySource; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.input.SAXBuilder; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying Scopus + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science dot com) + */ +public class ScopusImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private int timeout = 1000; + + int itemPerPage = 25; + + private String url; + private String apiKey; + private String instKey; + private String viewMode; + + @Autowired + private LiveImportClient liveImportClient; + + public LiveImportClient getLiveImportClient() { + return liveImportClient; + } + + public void setLiveImportClient(LiveImportClient liveImportClient) { + this.liveImportClient = liveImportClient; + } + + @Override + public void init() throws Exception {} + + /** + * The string that identifies this import implementation. 
Preferable a URI + * + * @return the identifying uri + */ + @Override + public String getImportSource() { + return "scopus"; + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + if (isEID(query)) { + return retry(new FindByIdCallable(query)).size(); + } + if (DoiCheck.isDoi(query)) { + query = DoiCheck.purgeDoiValue(query); + } + return retry(new SearchNBByQueryCallable(query)); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + if (isEID(query.toString())) { + return retry(new FindByIdCallable(query.toString())).size(); + } + if (DoiCheck.isDoi(query.toString())) { + query.addParameter("query", DoiCheck.purgeDoiValue(query.toString())); + } + return retry(new SearchNBByQueryCallable(query)); + } + + @Override + public Collection getRecords(String query, int start, + int count) throws MetadataSourceException { + if (isEID(query)) { + return retry(new FindByIdCallable(query)); + } + if (DoiCheck.isDoi(query)) { + query = DoiCheck.purgeDoiValue(query); + } + return retry(new SearchByQueryCallable(query, count, start)); + } + + @Override + public Collection getRecords(Query query) + throws MetadataSourceException { + if (isEID(query.toString())) { + return retry(new FindByIdCallable(query.toString())); + } + if (DoiCheck.isDoi(query.toString())) { + query.addParameter("query", DoiCheck.purgeDoiValue(query.toString())); + } + return retry(new SearchByQueryCallable(query)); + } + + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + List records = null; + if (DoiCheck.isDoi(query.toString())) { + query.addParameter("query", DoiCheck.purgeDoiValue(query.toString())); + } + if (isEID(query.toString())) { + records = retry(new FindByIdCallable(query.toString())); + } else { + records = retry(new SearchByQueryCallable(query)); + } + return records == null || records.isEmpty() ? 
null : records.get(0); + } + + @Override + public Collection findMatchingRecords(Item item) + throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for Scopus"); + } + + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + List records = retry(new FindByIdCallable(id)); + return records == null || records.isEmpty() ? null : records.get(0); + } + + @Override + public Collection findMatchingRecords(Query query) + throws MetadataSourceException { + if (isEID(query.toString())) { + return retry(new FindByIdCallable(query.toString())); + } + if (DoiCheck.isDoi(query.toString())) { + query.addParameter("query", DoiCheck.purgeDoiValue(query.toString())); + } + return retry(new FindByQueryCallable(query)); + } + + private boolean isEID(String query) { + Pattern pattern = Pattern.compile("2-s2\\.0-\\d+"); + Matcher match = pattern.matcher(query); + if (match.matches()) { + return true; + } + return false; + } + + /** + * This class implements a callable to get the numbers of result + */ + private class SearchNBByQueryCallable implements Callable { + + private String query; + + private SearchNBByQueryCallable(String queryString) { + this.query = queryString; + } + + private SearchNBByQueryCallable(Query query) { + this.query = query.getParameterAsClass("query", String.class); + } + + @Override + public Integer call() throws Exception { + if (StringUtils.isNotBlank(apiKey)) { + // Execute the request. 
+ Map> params = new HashMap>(); + Map requestParams = getRequestParameters(query, null, null, null); + params.put(URI_PARAMETERS, requestParams); + String response = liveImportClient.executeHttpGetRequest(timeout, url, params); + + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(response)); + Element root = document.getRootElement(); + + List namespaces = Arrays.asList( + Namespace.getNamespace("opensearch", "http://a9.com/-/spec/opensearch/1.1/")); + XPathExpression xpath = XPathFactory.instance() + .compile("opensearch:totalResults", Filters.element(), null, namespaces); + + Element count = xpath.evaluateFirst(root); + try { + return Integer.parseInt(count.getText()); + } catch (NumberFormatException e) { + return null; + } + } + return null; + } + } + + /** + * This class is a Callable implementation to get a Scopus entry using EID + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class FindByIdCallable implements Callable> { + + private String eid; + + private FindByIdCallable(String eid) { + this.eid = eid; + } + + @Override + public List call() throws Exception { + List results = new ArrayList<>(); + String queryString = "EID(" + eid.replace("!", "/") + ")"; + if (StringUtils.isNotBlank(apiKey)) { + Map> params = new HashMap>(); + Map requestParams = getRequestParameters(queryString, viewMode, null, null); + params.put(URI_PARAMETERS, requestParams); + String response = liveImportClient.executeHttpGetRequest(timeout, url, params); + List elements = splitToRecords(response); + for (Element record : elements) { + results.add(transformSourceRecords(record)); + } + } + return results; + } + } + + /** + * This class implements a callable to get the items based on query parameters + */ + private class FindByQueryCallable implements Callable> { + + private String title; + private String author; + private Integer year; + private Integer start; + private Integer count; + + private 
FindByQueryCallable(Query query) { + this.title = query.getParameterAsClass("title", String.class); + this.year = query.getParameterAsClass("year", Integer.class); + this.author = query.getParameterAsClass("author", String.class); + this.start = query.getParameterAsClass("start", Integer.class) != null ? + query.getParameterAsClass("start", Integer.class) : 0; + this.count = query.getParameterAsClass("count", Integer.class) != null ? + query.getParameterAsClass("count", Integer.class) : 20; + } + + @Override + public List call() throws Exception { + List results = new ArrayList<>(); + String queryString = ""; + StringBuffer query = new StringBuffer(); + if (StringUtils.isNotBlank(title)) { + query.append("title(").append(title).append(""); + } + if (StringUtils.isNotBlank(author)) { + // [FAU] + if (query.length() > 0) { + query.append(" AND "); + } + query.append("AUTH(").append(author).append(")"); + } + if (year != -1) { + // [DP] + if (query.length() > 0) { + query.append(" AND "); + } + query.append("PUBYEAR IS ").append(year); + } + queryString = query.toString(); + + if (apiKey != null && !apiKey.equals("")) { + Map> params = new HashMap>(); + Map requestParams = getRequestParameters(queryString, viewMode, start, count); + params.put(URI_PARAMETERS, requestParams); + String response = liveImportClient.executeHttpGetRequest(timeout, url, params); + List elements = splitToRecords(response); + for (Element record : elements) { + results.add(transformSourceRecords(record)); + } + } + return results; + } + } + + /** + * Find records matching a string query. + * + * @param query A query string to base the search on. + * @param start Offset to start at + * @param count Number of records to retrieve. + * @return A set of records. Fully transformed. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class SearchByQueryCallable implements Callable> { + private Query query; + + + private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("start", start); + query.addParameter("count", maxResult); + } + + private SearchByQueryCallable(Query query) { + this.query = query; + } + + @Override + public List call() throws Exception { + List results = new ArrayList<>(); + String queryString = query.getParameterAsClass("query", String.class); + Integer start = query.getParameterAsClass("start", Integer.class); + Integer count = query.getParameterAsClass("count", Integer.class); + if (StringUtils.isNotBlank(apiKey)) { + Map> params = new HashMap>(); + Map requestParams = getRequestParameters(queryString, viewMode, start, count); + params.put(URI_PARAMETERS, requestParams); + String response = liveImportClient.executeHttpGetRequest(timeout, url, params); + List elements = splitToRecords(response); + for (Element record : elements) { + results.add(transformSourceRecords(record)); + } + } + return results; + } + } + + private Map getRequestParameters(String query, String viewMode, Integer start, Integer count) { + Map params = new HashMap(); + params.put("httpAccept", "application/xml"); + params.put("apiKey", apiKey); + params.put("query", query); + + if (StringUtils.isNotBlank(instKey)) { + params.put("insttoken", instKey); + } + if (StringUtils.isNotBlank(viewMode)) { + params.put("view", viewMode); + } + + params.put("start", (Objects.nonNull(start) ? start + "" : "0")); + params.put("count", (Objects.nonNull(count) ? 
count + "" : "20")); + return params; + } + + private List splitToRecords(String recordsSrc) { + try { + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(recordsSrc)); + Element root = document.getRootElement(); + List records = root.getChildren("entry",Namespace.getNamespace("http://www.w3.org/2005/Atom")); + return records; + } catch (JDOMException | IOException e) { + return new ArrayList(); + } + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public String getViewMode() { + return viewMode; + } + + public void setViewMode(String viewMode) { + this.viewMode = viewMode; + } + + public String getApiKey() { + return apiKey; + } + + public String getInstKey() { + return instKey; + } + + public void setApiKey(String apiKey) { + this.apiKey = apiKey; + } + + public void setInstKey(String instKey) { + this.instKey = instKey; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/DoiCheck.java b/dspace-api/src/main/java/org/dspace/importer/external/service/DoiCheck.java new file mode 100644 index 0000000000..95d42e3a27 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/DoiCheck.java @@ -0,0 +1,47 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.service; + +import java.util.Arrays; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Utility class that provides methods to check if a given string is a DOI + * + * @author Corrado Lombardi (corrado.lombardi at 4science.it) + */ +public class DoiCheck { + + private static final List DOI_PREFIXES = Arrays.asList("http://dx.doi.org/", "https://dx.doi.org/"); + + private 
static final Pattern PATTERN = Pattern.compile("10.\\d{4,9}/[-._;()/:A-Z0-9]+" + + "|10.1002/[^\\s]+" + + "|10.\\d{4}/\\d+-\\d+X?(\\d+)" + + "\\d+<[\\d\\w]+:[\\d\\w]*>\\d+.\\d+.\\w+;\\d" + + "|10.1021/\\w\\w\\d++" + + "|10.1207/[\\w\\d]+\\&\\d+_\\d+", + Pattern.CASE_INSENSITIVE); + + private DoiCheck() {} + + public static boolean isDoi(final String value) { + Matcher m = PATTERN.matcher(purgeDoiValue(value)); + return m.matches(); + } + + public static String purgeDoiValue(final String query) { + String value = query.replaceAll(",", ""); + for (final String prefix : DOI_PREFIXES) { + value = value.replaceAll(prefix, ""); + } + return value.trim(); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractPlainMetadataSource.java b/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractPlainMetadataSource.java index 019cf33177..5d83b9a7cc 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractPlainMetadataSource.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractPlainMetadataSource.java @@ -42,7 +42,7 @@ public abstract class AbstractPlainMetadataSource /** * Set the file extensions supported by this metadata service * - * @param supportedExtensionsthe file extensions (xml,txt,...) supported by this service + * @param supportedExtensions the file extensions (xml,txt,...) supported by this service */ public void setSupportedExtensions(List supportedExtensions) { this.supportedExtensions = supportedExtensions; @@ -57,7 +57,7 @@ public abstract class AbstractPlainMetadataSource * Return a list of ImportRecord constructed from input file. 
This list is based on * the results retrieved from the file (InputStream) parsed through abstract method readData * - * @param InputStream The inputStream of the file + * @param is The inputStream of the file * @return A list of {@link ImportRecord} * @throws FileSourceException if, for any reason, the file is not parsable */ @@ -76,7 +76,7 @@ public abstract class AbstractPlainMetadataSource * the result retrieved from the file (InputStream) parsed through abstract method * "readData" implementation * - * @param InputStream The inputStream of the file + * @param is The inputStream of the file * @return An {@link ImportRecord} matching the file content * @throws FileSourceException if, for any reason, the file is not parsable * @throws FileMultipleOccurencesException if the file contains more than one entry diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java b/dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java index 5bef0984df..13c81d1516 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java @@ -30,7 +30,7 @@ public interface FileSource extends MetadataSource { /** * Return a list of ImportRecord constructed from input file. * - * @param InputStream The inputStream of the file + * @param inputStream The inputStream of the file * @return A list of {@link ImportRecord} * @throws FileSourceException if, for any reason, the file is not parsable */ @@ -40,7 +40,7 @@ public interface FileSource extends MetadataSource { /** * Return an ImportRecord constructed from input file. 
* - * @param InputStream The inputStream of the file + * @param inputStream The inputStream of the file * @return An {@link ImportRecord} matching the file content * @throws FileSourceException if, for any reason, the file is not parsable * @throws FileMultipleOccurencesException if the file contains more than one entry diff --git a/dspace-api/src/main/java/org/dspace/importer/external/vufind/VuFindImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/vufind/VuFindImportMetadataSourceServiceImpl.java new file mode 100644 index 0000000000..a4f90fa5ba --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/vufind/VuFindImportMetadataSourceServiceImpl.java @@ -0,0 +1,339 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.vufind; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.Callable; +import javax.el.MethodNotFoundException; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.client.utils.URIBuilder; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import 
org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.components.QuerySource; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying VuFind + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class VuFindImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private final static Logger log = LogManager.getLogger(); + + private String url; + private String urlSearch; + + private String fields; + + @Autowired + private LiveImportClient liveImportClient; + + public VuFindImportMetadataSourceServiceImpl(String fields) { + this.fields = fields; + } + + @Override + public String getImportSource() { + return "VuFind"; + } + + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + String records = retry(new GetByVuFindIdCallable(id, fields)); + List importRecords = extractMetadataFromRecordList(records); + return importRecords != null && !importRecords.isEmpty() ? 
importRecords.get(0) : null; + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + String records = retry(new SearchByQueryCallable(query, count, start, fields)); + return extractMetadataFromRecordList(records); + } + + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + String records = retry(new SearchByQueryCallable(query, fields)); + return extractMetadataFromRecordList(records); + } + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + String records = retry(new SearchByQueryCallable(query, fields)); + List importRecords = extractMetadataFromRecordList(records); + return importRecords != null && !importRecords.isEmpty() ? importRecords.get(0) : null; + } + + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + String records = retry(new FindMatchingRecordsCallable(query)); + return extractMetadataFromRecordList(records); + } + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for VuFind"); + } + + @Override + public void init() throws Exception {} + + /** + * This class is a Callable implementation to count the number of entries for an VuFind query. + * This Callable use as query value to CrossRef the string queryString passed to constructor. + * If the object will be construct through Query.class instance, the value of the Query's + * map with the key "query" will be used. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class CountByQueryCallable implements Callable { + + private Query query; + + public CountByQueryCallable(String queryString) { + query = new Query(); + query.addParameter("query", queryString); + } + + public CountByQueryCallable(Query query) { + this.query = query; + } + + @Override + public Integer call() throws Exception { + Integer start = 0; + Integer count = 1; + int page = start / count + 1; + URIBuilder uriBuilder = new URIBuilder(urlSearch); + uriBuilder.addParameter("type", "AllField"); + uriBuilder.addParameter("page", String.valueOf(page)); + uriBuilder.addParameter("limit", count.toString()); + uriBuilder.addParameter("prettyPrint", String.valueOf(true)); + uriBuilder.addParameter("lookfor", query.getParameterAsClass("query", String.class)); + Map> params = new HashMap>(); + String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + JsonNode node = convertStringJsonToJsonNode(responseString); + JsonNode resultCountNode = node.get("resultCount"); + return resultCountNode.intValue(); + } + } + + /** + * This class is a Callable implementation to get an VuFind entry using VuFind id + * The id to use can be passed through the constructor as a String or as Query's map entry, with the key "id". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class GetByVuFindIdCallable implements Callable { + + private String id; + + private String fields; + + public GetByVuFindIdCallable(String id, String fields) { + this.id = id; + if (fields != null && fields.length() > 0) { + this.fields = fields; + } else { + this.fields = null; + } + } + + @Override + public String call() throws Exception { + URIBuilder uriBuilder = new URIBuilder(url); + uriBuilder.addParameter("id", id); + uriBuilder.addParameter("prettyPrint", "false"); + if (StringUtils.isNotBlank(fields)) { + for (String field : fields.split(",")) { + uriBuilder.addParameter("field[]", field); + } + } + Map> params = new HashMap>(); + String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + return response; + } + } + + /** + * This class is a Callable implementation to get VuFind entries based on query object. + * This Callable use as query value the string queryString passed to constructor. + * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByQueryCallable implements Callable { + + private Query query; + + private String fields; + + public SearchByQueryCallable(String queryString, Integer maxResult, Integer start, String fields) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("count", maxResult); + query.addParameter("start", start); + if (StringUtils.isNotBlank(fields)) { + this.fields = fields; + } else { + this.fields = null; + } + } + + public SearchByQueryCallable(Query query, String fields) { + this.query = query; + if (StringUtils.isNotBlank(fields)) { + this.fields = fields; + } else { + this.fields = null; + } + } + + @Override + public String call() throws Exception { + Integer start = query.getParameterAsClass("start", Integer.class); + Integer count = query.getParameterAsClass("count", Integer.class); + int page = count != 0 ? start / count : 0; + URIBuilder uriBuilder = new URIBuilder(urlSearch); + uriBuilder.addParameter("type", "AllField"); + //page looks 1 based (start = 0, count = 20 -> page = 0) + uriBuilder.addParameter("page", String.valueOf(page + 1)); + uriBuilder.addParameter("limit", count.toString()); + uriBuilder.addParameter("prettyPrint", String.valueOf(true)); + uriBuilder.addParameter("lookfor", query.getParameterAsClass("query", String.class)); + if (StringUtils.isNotBlank(fields)) { + for (String field : fields.split(",")) { + uriBuilder.addParameter("field[]", field); + } + } + Map> params = new HashMap>(); + return liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + } + + } + + /** + * This class is a Callable implementation to search VuFind entries using author and title. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + public class FindMatchingRecordsCallable implements Callable { + + private Query query; + + private String fields; + + public FindMatchingRecordsCallable(Query query) { + this.query = query; + } + + @Override + public String call() throws Exception { + String author = query.getParameterAsClass("author", String.class); + String title = query.getParameterAsClass("title", String.class); + Integer start = query.getParameterAsClass("start", Integer.class); + Integer count = query.getParameterAsClass("count", Integer.class); + int page = count != 0 ? start / count : 0; + URIBuilder uriBuilder = new URIBuilder(url); + uriBuilder.addParameter("type", "AllField"); + //pagination is 1 based (first page: start = 0, count = 20 -> page = 0 -> +1 = 1) + uriBuilder.addParameter("page", String.valueOf(page ++)); + uriBuilder.addParameter("limit", count.toString()); + uriBuilder.addParameter("prettyPrint", "true"); + if (fields != null && !fields.isEmpty()) { + for (String field : fields.split(",")) { + uriBuilder.addParameter("field[]", field); + } + } + String filter = StringUtils.EMPTY; + if (StringUtils.isNotBlank(author)) { + filter = "author:" + author; + } + if (StringUtils.isNotBlank(title)) { + if (StringUtils.isNotBlank(filter)) { + filter = filter + " AND title:" + title; + } else { + filter = "title:" + title; + } + } + uriBuilder.addParameter("lookfor", filter); + Map> params = new HashMap>(); + return liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + } + + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return body; + } + + private List extractMetadataFromRecordList(String records) { + List recordsResult = new ArrayList<>(); + JsonNode jsonNode = 
convertStringJsonToJsonNode(records); + JsonNode node = jsonNode.get("records"); + if (Objects.nonNull(node) && node.isArray()) { + Iterator nodes = node.iterator(); + while (nodes.hasNext()) { + recordsResult.add(transformSourceRecords(nodes.next().toString())); + } + } + return recordsResult; + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public String getUrlSearch() { + return urlSearch; + } + + public void setUrlSearch(String urlSearch) { + this.urlSearch = urlSearch; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/vufind/metadatamapping/VuFindFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/vufind/metadatamapping/VuFindFieldMapping.java new file mode 100644 index 0000000000..b14927a14c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/vufind/metadatamapping/VuFindFieldMapping.java @@ -0,0 +1,39 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.vufind.metadatamapping; + +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the VuFind metadatum fields on the DSpace metadatum fields + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +@SuppressWarnings("rawtypes") +public class VuFindFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. 
The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. + */ + @Override + @SuppressWarnings("unchecked") + @Resource(name = "vufindMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSFieldMapping.java new file mode 100644 index 0000000000..be4acfbcea --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSFieldMapping.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.wos.service; +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the Web of Science metadatum fields on the DSpace metadatum fields + * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science dot it) + */ +@SuppressWarnings("rawtypes") +public class WOSFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve + * metadata and metadata that will be set to the item. 
+ */ + @Override + @SuppressWarnings("unchecked") + @Resource(name = "wosMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSImportMetadataSourceServiceImpl.java new file mode 100644 index 0000000000..2ccdc12b8d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSImportMetadataSourceServiceImpl.java @@ -0,0 +1,329 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.wos.service; + +import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.HEADER_PARAMETERS; + +import java.io.IOException; +import java.io.StringReader; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import javax.el.MethodNotFoundException; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import 
org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.DoiCheck; +import org.dspace.importer.external.service.components.QuerySource; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.filter.Filters; +import org.jdom2.input.SAXBuilder; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying Web of Science. + * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it) + */ +public class WOSImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private final static Logger log = LogManager.getLogger(); + + private static final String AI_PATTERN = "^AI=(.*)"; + private static final Pattern ISI_PATTERN = Pattern.compile("^\\d{15}$"); + + private int timeout = 1000; + + private String url; + private String urlSearch; + private String apiKey; + + @Autowired + private LiveImportClient liveImportClient; + + @Override + public void init() throws Exception {} + + /** + * The string that identifies this import implementation. Preferable a URI + * + * @return the identifying uri + */ + @Override + public String getImportSource() { + return "wos"; + } + + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query, count, start)); + } + + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query)); + } + + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + List records = retry(new SearchByQueryCallable(query)); + return records == null || records.isEmpty() ? 
null : records.get(0); + } + + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + List records = retry(new FindByIdCallable(id)); + return records == null || records.isEmpty() ? null : records.get(0); + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + return retry(new SearchNBByQueryCallable(query)); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for WOS"); + } + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for WOS"); + } + + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for WOS"); + } + + /** + * This class implements a callable to get the numbers of result + */ + private class SearchNBByQueryCallable implements Callable { + + private String query; + + private SearchNBByQueryCallable(String queryString) { + this.query = queryString; + } + + private SearchNBByQueryCallable(Query query) { + this.query = query.getParameterAsClass("query", String.class); + } + + @Override + public Integer call() throws Exception { + if (StringUtils.isNotBlank(apiKey)) { + String queryString = URLEncoder.encode(checkQuery(query), StandardCharsets.UTF_8); + String url = urlSearch + queryString + "&count=1&firstRecord=1"; + Map> params = new HashMap>(); + params.put(HEADER_PARAMETERS, getRequestParameters()); + String response = liveImportClient.executeHttpGetRequest(timeout, url, params); + + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(response)); + Element root = document.getRootElement(); + XPathExpression xpath = XPathFactory.instance().compile("//*[@name=\"RecordsFound\"]", + Filters.element(), null); + 
Element tot = xpath.evaluateFirst(root); + return Integer.valueOf(tot.getValue()); + } + return null; + } + } + + /** + * This class is a Callable implementation to get a Web of Science entry using Doi + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class FindByIdCallable implements Callable> { + + private String doi; + + private FindByIdCallable(String doi) { + this.doi = URLEncoder.encode(doi, StandardCharsets.UTF_8); + } + + @Override + public List call() throws Exception { + List results = new ArrayList<>(); + if (StringUtils.isNotBlank(apiKey)) { + String urlString = url + this.doi + "?databaseId=WOS&lang=en&count=10&firstRecord=1"; + Map> params = new HashMap>(); + params.put(HEADER_PARAMETERS, getRequestParameters()); + String response = liveImportClient.executeHttpGetRequest(timeout, urlString, params); + + List elements = splitToRecords(response); + for (Element record : elements) { + results.add(transformSourceRecords(record)); + } + } + return results; + } + } + + /** + * Find records matching a string query. + * + * @param query A query string to base the search on. + * @param start Offset to start at + * @param count Number of records to retrieve. + * @return A set of records. Fully transformed. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class SearchByQueryCallable implements Callable> { + + private Query query; + + private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("start", start); + query.addParameter("count", maxResult); + } + + private SearchByQueryCallable(Query query) { + this.query = query; + } + + @Override + public List call() throws Exception { + List results = new ArrayList<>(); + String queryString = checkQuery(query.getParameterAsClass("query", String.class)); + Integer start = query.getParameterAsClass("start", Integer.class); + Integer count = query.getParameterAsClass("count", Integer.class); + if (StringUtils.isNotBlank(apiKey)) { + Map> params = new HashMap>(); + params.put(HEADER_PARAMETERS, getRequestParameters()); + String url = urlSearch + URLEncoder.encode(queryString, StandardCharsets.UTF_8) + + "&count=" + count + "&firstRecord=" + (start + 1); + String response = liveImportClient.executeHttpGetRequest(timeout, url, params); + + List omElements = splitToRecords(response); + for (Element el : omElements) { + results.add(transformSourceRecords(el)); + } + } + return results; + } + + } + + private Map getRequestParameters() { + Map params = new HashMap(); + params.put("Accept", "application/xml"); + params.put("X-ApiKey", this.apiKey); + return params; + } + + /** + * This method check if the query contain + * "AI=(...)" Author Identifier or a DOI "DO=(query)" + * or Accession Number "UT=(query)". 
+ * Otherwise the value is placed in TS=(query) tag + * that searches for topic terms in the following fields within a document: + * Title, Abstract, Author keywords, Keywords Plus + * + * @param query + */ + private String checkQuery(String query) { + Pattern risPattern = Pattern.compile(AI_PATTERN); + Matcher risMatcher = risPattern.matcher(query.trim()); + if (risMatcher.matches()) { + return query; + } + if (DoiCheck.isDoi(query)) { + // FIXME: workaround to be removed once fixed by the community the double post of query param + if (query.startsWith(",")) { + query = query.substring(1); + } + return "DO=(" + query + ")"; + } else if (isIsi(query)) { + return "UT=(" + query + ")"; + } + StringBuilder queryBuilder = new StringBuilder("TS=("); + queryBuilder.append(query).append(")"); + return queryBuilder.toString(); + } + + private boolean isIsi(String query) { + if (query.startsWith("WOS:")) { + return true; + } + Matcher matcher = ISI_PATTERN.matcher(query.trim()); + return matcher.matches(); + } + + private List splitToRecords(String recordsSrc) { + try { + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(recordsSrc)); + Element root = document.getRootElement(); + String cData = XPathFactory.instance().compile("//*[@name=\"Records\"]", + Filters.element(), null).evaluate(root).get(0).getValue().trim(); + Document intDocument = saxBuilder.build(new StringReader(cData)); + XPathExpression xPath = XPathFactory.instance().compile("*", Filters.element(), null); + List records = xPath.evaluate(intDocument.getRootElement()); + if (CollectionUtils.isNotEmpty(records)) { + return records; + } + } catch (JDOMException | IOException e) { + log.error(e.getMessage()); + return new ArrayList(); + } + return new ArrayList(); + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public String getUrlSearch() { + return urlSearch; + } + + public void setUrlSearch(String 
urlSearch) { + this.urlSearch = urlSearch; + } + + public String getApiKey() { + return apiKey; + } + + public void setApiKey(String apiKey) { + this.apiKey = apiKey; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorService.java b/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorService.java index 0c061d2d64..64450b796c 100644 --- a/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorService.java +++ b/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorService.java @@ -10,7 +10,7 @@ package org.dspace.license; import java.io.IOException; import java.util.Map; -import org.jdom.Document; +import org.jdom2.Document; /** * Service interface class for the Creative commons license connector service. diff --git a/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorServiceImpl.java b/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorServiceImpl.java index 792c25d629..cdecadba52 100644 --- a/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorServiceImpl.java @@ -32,13 +32,14 @@ import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.util.EntityUtils; import org.apache.logging.log4j.Logger; import org.dspace.services.ConfigurationService; -import org.jaxen.JaxenException; -import org.jaxen.jdom.JDOMXPath; -import org.jdom.Attribute; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.input.SAXBuilder; +import org.jdom2.Attribute; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.filter.Filters; +import org.jdom2.input.SAXBuilder; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import 
org.xml.sax.InputSource; @@ -96,7 +97,7 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService, List licenses; try (CloseableHttpResponse response = client.execute(httpGet)) { licenses = retrieveLicenses(response); - } catch (JDOMException | JaxenException | IOException e) { + } catch (JDOMException | IOException e) { log.error("Error while retrieving the license details using url: " + uri, e); licenses = Collections.emptyList(); } @@ -105,12 +106,12 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService, for (String license : licenses) { - String licenseUri = ccLicenseUrl + "/license/" + license; + String licenseUri = ccLicenseUrl + "/license/" + license + "?locale=" + language; HttpGet licenseHttpGet = new HttpGet(licenseUri); try (CloseableHttpResponse response = client.execute(licenseHttpGet)) { CCLicense ccLicense = retrieveLicenseObject(license, response); ccLicenses.put(ccLicense.getLicenseId(), ccLicense); - } catch (JaxenException | JDOMException | IOException e) { + } catch (JDOMException | IOException e) { log.error("Error while retrieving the license details using url: " + licenseUri, e); } } @@ -125,25 +126,23 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService, * @param response The response from the API * @return a list of license identifiers for which details need to be retrieved * @throws IOException - * @throws JaxenException * @throws JDOMException */ private List retrieveLicenses(CloseableHttpResponse response) - throws IOException, JaxenException, JDOMException { + throws IOException, JDOMException { List domains = new LinkedList<>(); String[] excludedLicenses = configurationService.getArrayProperty("cc.license.classfilter"); - String responseString = EntityUtils.toString(response.getEntity()); - JDOMXPath licenseClassXpath = new JDOMXPath("//licenses/license"); - + XPathExpression licenseClassXpath = + XPathFactory.instance().compile("//licenses/license", 
Filters.element()); try (StringReader stringReader = new StringReader(responseString)) { InputSource is = new InputSource(stringReader); - org.jdom.Document classDoc = this.parser.build(is); + org.jdom2.Document classDoc = this.parser.build(is); - List elements = licenseClassXpath.selectNodes(classDoc); + List elements = licenseClassXpath.evaluate(classDoc); for (Element element : elements) { String licenseId = getSingleNodeValue(element, "@id"); if (StringUtils.isNotBlank(licenseId) && !ArrayUtils.contains(excludedLicenses, licenseId)) { @@ -163,30 +162,29 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService, * @param response for a specific CC License response * @return the corresponding CC License Object * @throws IOException - * @throws JaxenException * @throws JDOMException */ private CCLicense retrieveLicenseObject(final String licenseId, CloseableHttpResponse response) - throws IOException, JaxenException, JDOMException { + throws IOException, JDOMException { String responseString = EntityUtils.toString(response.getEntity()); - - JDOMXPath licenseClassXpath = new JDOMXPath("//licenseclass"); - JDOMXPath licenseFieldXpath = new JDOMXPath("field"); - + XPathExpression licenseClassXpath = + XPathFactory.instance().compile("//licenseclass", Filters.fpassthrough()); + XPathExpression licenseFieldXpath = + XPathFactory.instance().compile("field", Filters.element()); try (StringReader stringReader = new StringReader(responseString)) { InputSource is = new InputSource(stringReader); - org.jdom.Document classDoc = this.parser.build(is); + org.jdom2.Document classDoc = this.parser.build(is); - Object element = licenseClassXpath.selectSingleNode(classDoc); + Object element = licenseClassXpath.evaluateFirst(classDoc); String licenseLabel = getSingleNodeValue(element, "label"); List ccLicenseFields = new LinkedList<>(); - List licenseFields = licenseFieldXpath.selectNodes(element); + List licenseFields = licenseFieldXpath.evaluate(element); 
for (Element licenseField : licenseFields) { CCLicenseField ccLicenseField = parseLicenseField(licenseField); ccLicenseFields.add(ccLicenseField); @@ -196,13 +194,14 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService, } } - private CCLicenseField parseLicenseField(final Element licenseField) throws JaxenException { + private CCLicenseField parseLicenseField(final Element licenseField) { String id = getSingleNodeValue(licenseField, "@id"); String label = getSingleNodeValue(licenseField, "label"); String description = getSingleNodeValue(licenseField, "description"); - JDOMXPath enumXpath = new JDOMXPath("enum"); - List enums = enumXpath.selectNodes(licenseField); + XPathExpression enumXpath = + XPathFactory.instance().compile("enum", Filters.element()); + List enums = enumXpath.evaluate(licenseField); List ccLicenseFieldEnumList = new LinkedList<>(); @@ -215,7 +214,7 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService, } - private CCLicenseFieldEnum parseEnum(final Element enumElement) throws JaxenException { + private CCLicenseFieldEnum parseEnum(final Element enumElement) { String id = getSingleNodeValue(enumElement, "@id"); String label = getSingleNodeValue(enumElement, "label"); String description = getSingleNodeValue(enumElement, "description"); @@ -236,9 +235,10 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService, } } - private String getSingleNodeValue(final Object t, String query) throws JaxenException { - JDOMXPath xpath = new JDOMXPath(query); - Object singleNode = xpath.selectSingleNode(t); + private String getSingleNodeValue(final Object t, String query) { + XPathExpression xpath = + XPathFactory.instance().compile(query, Filters.fpassthrough()); + Object singleNode = xpath.evaluateFirst(t); return getNodeValue(singleNode); } @@ -273,7 +273,7 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService, try (CloseableHttpResponse response = 
client.execute(httpPost)) { return retrieveLicenseUri(response); - } catch (JDOMException | JaxenException | IOException e) { + } catch (JDOMException | IOException e) { log.error("Error while retrieving the license uri for license : " + licenseId + " with answers " + answerMap.toString(), e); } @@ -286,21 +286,20 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService, * @param response for a specific CC License URI response * @return the corresponding CC License URI as a string * @throws IOException - * @throws JaxenException * @throws JDOMException */ private String retrieveLicenseUri(final CloseableHttpResponse response) - throws IOException, JaxenException, JDOMException { + throws IOException, JDOMException { String responseString = EntityUtils.toString(response.getEntity()); - JDOMXPath licenseClassXpath = new JDOMXPath("//result/license-uri"); - + XPathExpression licenseClassXpath = + XPathFactory.instance().compile("//result/license-uri", Filters.fpassthrough()); try (StringReader stringReader = new StringReader(responseString)) { InputSource is = new InputSource(stringReader); - org.jdom.Document classDoc = this.parser.build(is); + org.jdom2.Document classDoc = this.parser.build(is); - Object node = licenseClassXpath.selectSingleNode(classDoc); + Object node = licenseClassXpath.evaluateFirst(classDoc); String nodeValue = getNodeValue(node); if (StringUtils.isNotBlank(nodeValue)) { @@ -364,12 +363,7 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService, * @return the license name */ public String retrieveLicenseName(final Document doc) { - try { - return getSingleNodeValue(doc, "//result/license-name"); - } catch (JaxenException e) { - log.error("Error while retrieving the license name from the license document", e); - } - return null; + return getSingleNodeValue(doc, "//result/license-name"); } } diff --git a/dspace-api/src/main/java/org/dspace/license/CreativeCommonsServiceImpl.java 
b/dspace-api/src/main/java/org/dspace/license/CreativeCommonsServiceImpl.java index ccc660b63b..c9c8127d18 100644 --- a/dspace-api/src/main/java/org/dspace/license/CreativeCommonsServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/license/CreativeCommonsServiceImpl.java @@ -40,8 +40,8 @@ import org.dspace.core.Utils; import org.dspace.license.service.CreativeCommonsService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Document; -import org.jdom.transform.JDOMSource; +import org.jdom2.Document; +import org.jdom2.transform.JDOMSource; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; @@ -430,9 +430,10 @@ public class CreativeCommonsServiceImpl implements CreativeCommonsService, Initi } - private void addLicenseField(Context context, Item item, String field, String value) throws SQLException { + private void addLicenseField(Context context, Item item, String field, String language, String value) + throws SQLException { String[] params = splitField(field); - itemService.addMetadata(context, item, params[0], params[1], params[2], params[3], value); + itemService.addMetadata(context, item, params[0], params[1], params[2], language, value); } @@ -605,7 +606,10 @@ public class CreativeCommonsServiceImpl implements CreativeCommonsService, Initi } } - updateJurisdiction(fullParamMap); + // Replace the jurisdiction unless default value is set to none + if (!"none".equals(jurisdiction)) { + updateJurisdiction(fullParamMap); + } return fullParamMap; } @@ -688,12 +692,12 @@ public class CreativeCommonsServiceImpl implements CreativeCommonsService, Initi String uriField = getCCField("uri"); String nameField = getCCField("name"); - addLicenseField(context, item, uriField, licenseUri); + addLicenseField(context, item, uriField, null, licenseUri); if (configurationService.getBooleanProperty("cc.submit.addbitstream")) { 
setLicenseRDF(context, item, fetchLicenseRDF(doc)); } if (configurationService.getBooleanProperty("cc.submit.setname")) { - addLicenseField(context, item, nameField, licenseName); + addLicenseField(context, item, nameField, "en", licenseName); } } diff --git a/dspace-api/src/main/java/org/dspace/license/service/CreativeCommonsService.java b/dspace-api/src/main/java/org/dspace/license/service/CreativeCommonsService.java index 1f5f1ddd02..0f4911aa3e 100644 --- a/dspace-api/src/main/java/org/dspace/license/service/CreativeCommonsService.java +++ b/dspace-api/src/main/java/org/dspace/license/service/CreativeCommonsService.java @@ -18,7 +18,7 @@ import org.dspace.content.Bitstream; import org.dspace.content.Item; import org.dspace.core.Context; import org.dspace.license.CCLicense; -import org.jdom.Document; +import org.jdom2.Document; /** * Service interface class for the Creative commons licensing. diff --git a/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java b/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java new file mode 100644 index 0000000000..33edea112e --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java @@ -0,0 +1,211 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid; + +import java.util.Date; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.EnumType; +import javax.persistence.Enumerated; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.Lob; +import javax.persistence.ManyToOne; +import javax.persistence.SequenceGenerator; +import javax.persistence.Table; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; + +import 
org.dspace.content.Item; +import org.dspace.core.ReloadableEntity; +import org.hibernate.annotations.Type; + +/** + * The ORCID history entity that it contains information relating to an attempt + * to synchronize the DSpace items and information on ORCID. While the entity + * {@link OrcidQueue} contains the data to be synchronized with ORCID, this + * entity instead contains the data synchronized with ORCID, with the result of + * the synchronization. Each record in this table is associated with a profile + * item and the entity synchronized (which can be the profile itself, a + * publication or a project/funding). If the entity is the profile itself then + * the metadata field contains the signature of the information synchronized. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@Entity +@Table(name = "orcid_history") +public class OrcidHistory implements ReloadableEntity { + + @Id + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "orcid_history_id_seq") + @SequenceGenerator(name = "orcid_history_id_seq", sequenceName = "orcid_history_id_seq", allocationSize = 1) + private Integer id; + + /** + * The profile item. + */ + @ManyToOne + @JoinColumn(name = "owner_id") + protected Item profileItem; + + /** + * The synchronized item. + */ + @ManyToOne + @JoinColumn(name = "entity_id") + private Item entity; + + /** + * The identifier of the synchronized resource on ORCID side. For more details + * see https://info.orcid.org/faq/what-is-a-put-code/ + */ + @Column(name = "put_code") + private String putCode; + + /** + * The record type. Could be publication, funding or a profile's section. + */ + @Column(name = "record_type") + private String recordType; + + /** + * A description of the synchronized resource. + */ + @Column(name = "description") + private String description; + + /** + * The signature of the synchronized metadata. This is used when the entity is + * the owner itself. 
+ */ + @Lob + @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Column(name = "metadata") + private String metadata; + + /** + * The operation performed on ORCID. + */ + @Enumerated(EnumType.STRING) + @Column(name = "operation") + private OrcidOperation operation; + + /** + * The response message incoming from ORCID. + */ + @Lob + @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Column(name = "response_message") + private String responseMessage; + + /** + * The timestamp of the synchronization attempt. + */ + @Temporal(TemporalType.TIMESTAMP) + @Column(name = "timestamp_last_attempt") + private Date timestamp = new Date(); + + /** + * The HTTP status incoming from ORCID. + */ + @Column(name = "status") + private Integer status; + + public Integer getStatus() { + return status; + } + + public void setStatus(Integer status) { + this.status = status; + } + + public void setId(Integer id) { + this.id = id; + } + + @Override + public Integer getID() { + return id; + } + + public Item getProfileItem() { + return profileItem; + } + + public void setProfileItem(Item profileItem) { + this.profileItem = profileItem; + } + + public Item getEntity() { + return entity; + } + + public void setEntity(Item entity) { + this.entity = entity; + } + + public String getPutCode() { + return putCode; + } + + public void setPutCode(String putCode) { + this.putCode = putCode; + } + + public String getResponseMessage() { + return responseMessage; + } + + public void setResponseMessage(String responseMessage) { + this.responseMessage = responseMessage; + } + + public String getRecordType() { + return recordType; + } + + public void setRecordType(String recordType) { + this.recordType = recordType; + } + + public String getMetadata() { + return metadata; + } + + public void setMetadata(String metadata) { + this.metadata = metadata; + } + + public OrcidOperation getOperation() { + return operation; + } + + public void 
setOperation(OrcidOperation operation) { + this.operation = operation; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public Date getTimestamp() { + return timestamp; + } + + public void setTimestamp(Date timestamp) { + this.timestamp = timestamp; + } + +} diff --git a/dspace-services/src/main/java/org/dspace/services/caching/package-info.java b/dspace-api/src/main/java/org/dspace/orcid/OrcidOperation.java similarity index 51% rename from dspace-services/src/main/java/org/dspace/services/caching/package-info.java rename to dspace-api/src/main/java/org/dspace/orcid/OrcidOperation.java index a73e0d3eaa..381e35e84d 100644 --- a/dspace-services/src/main/java/org/dspace/services/caching/package-info.java +++ b/dspace-api/src/main/java/org/dspace/orcid/OrcidOperation.java @@ -5,9 +5,16 @@ * * http://www.dspace.org/license/ */ +package org.dspace.orcid; /** - * Implementation of the core Caching service. + * Enum that models an ORCID synchronization operation. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * */ - -package org.dspace.services.caching; +public enum OrcidOperation { + INSERT, + UPDATE, + DELETE; +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java b/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java new file mode 100644 index 0000000000..4794e89008 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java @@ -0,0 +1,219 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid; + +import static org.apache.commons.lang3.StringUtils.isEmpty; +import static org.apache.commons.lang3.StringUtils.isNotEmpty; + +import java.util.Objects; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.EnumType; +import javax.persistence.Enumerated; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.Lob; +import javax.persistence.ManyToOne; +import javax.persistence.SequenceGenerator; +import javax.persistence.Table; + +import org.dspace.content.Item; +import org.dspace.core.ReloadableEntity; +import org.hibernate.annotations.Type; + +/** + * Entity that model a record on the ORCID synchronization queue. Each record in + * this table is associated with an profile item and the entity to be + * synchronized (which can be the profile itself, a publication or a + * project/funding). If the entity is the profile itself then the metadata field + * contains the signature of the information to be synchronized. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@Entity +@Table(name = "orcid_queue") +public class OrcidQueue implements ReloadableEntity { + + @Id + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "orcid_queue_id_seq") + @SequenceGenerator(name = "orcid_queue_id_seq", sequenceName = "orcid_queue_id_seq", allocationSize = 1) + private Integer id; + + /** + * The profile item. + */ + @ManyToOne + @JoinColumn(name = "owner_id") + protected Item profileItem; + + /** + * The entity to be synchronized. + */ + @ManyToOne + @JoinColumn(name = "entity_id") + private Item entity; + + /** + * A description of the resource to be synchronized. + */ + @Column(name = "description") + private String description; + + /** + * The identifier of the resource to be synchronized on ORCID side (in case of + * update or deletion). For more details see + * https://info.orcid.org/faq/what-is-a-put-code/ + */ + @Column(name = "put_code") + private String putCode; + + /** + * The record type. Could be publication, funding or a profile's section. + */ + @Column(name = "record_type") + private String recordType; + + /** + * The signature of the metadata to be synchronized. This is used when the + * entity is the owner itself. + */ + @Lob + @Column(name = "metadata") + @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + private String metadata; + + /** + * The operation to be performed on ORCID. + */ + @Enumerated(EnumType.STRING) + @Column(name = "operation") + private OrcidOperation operation; + + /** + * Synchronization attempts already made for a particular record. 
+ */ + @Column(name = "attempts") + private Integer attempts = 0; + + public boolean isInsertAction() { + return entity != null && isEmpty(putCode); + } + + public boolean isUpdateAction() { + return entity != null && isNotEmpty(putCode); + } + + public boolean isDeleteAction() { + return entity == null && isNotEmpty(putCode); + } + + public void setID(Integer id) { + this.id = id; + } + + @Override + public Integer getID() { + return this.id; + } + + public Item getProfileItem() { + return profileItem; + } + + public void setProfileItem(Item profileItem) { + this.profileItem = profileItem; + } + + public Item getEntity() { + return entity; + } + + public void setEntity(Item entity) { + this.entity = entity; + } + + public String getPutCode() { + return putCode; + } + + public void setPutCode(String putCode) { + this.putCode = putCode; + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + OrcidQueue other = (OrcidQueue) obj; + return Objects.equals(id, other.id); + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getRecordType() { + return recordType; + } + + public void setRecordType(String recordType) { + this.recordType = recordType; + } + + public String getMetadata() { + return metadata; + } + + public void setMetadata(String metadata) { + this.metadata = metadata; + } + + public OrcidOperation getOperation() { + return operation; + } + + public void setOperation(OrcidOperation operation) { + this.operation = operation; + } + + public Integer getAttempts() { + return attempts; + } + + public void setAttempts(Integer attempts) { + this.attempts = attempts; + } + + @Override + public String toString() { + return "OrcidQueue [id=" 
+ id + ", profileItem=" + profileItem + ", entity=" + entity + ", description=" + + description + + ", putCode=" + putCode + ", recordType=" + recordType + ", metadata=" + metadata + ", operation=" + + operation + "]"; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/OrcidToken.java b/dspace-api/src/main/java/org/dspace/orcid/OrcidToken.java new file mode 100644 index 0000000000..def289daf4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/OrcidToken.java @@ -0,0 +1,85 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.OneToOne; +import javax.persistence.SequenceGenerator; +import javax.persistence.Table; + +import org.dspace.content.Item; +import org.dspace.core.ReloadableEntity; +import org.dspace.eperson.EPerson; + +/** + * Entity that stores ORCID access-token related to a given eperson or a given + * profile item. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@Entity +@Table(name = "orcid_token") +public class OrcidToken implements ReloadableEntity { + + @Id + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "orcid_token_id_seq") + @SequenceGenerator(name = "orcid_token_id_seq", sequenceName = "orcid_token_id_seq", allocationSize = 1) + private Integer id; + + @OneToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "eperson_id") + protected EPerson ePerson; + + @OneToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "profile_item_id") + private Item profileItem; + + @Column(name = "access_token") + private String accessToken; + + @Override + public Integer getID() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public EPerson getEPerson() { + return ePerson; + } + + public void setEPerson(EPerson eperson) { + this.ePerson = eperson; + } + + public Item getProfileItem() { + return profileItem; + } + + public void setProfileItem(Item profileItem) { + this.profileItem = profileItem; + } + + public String getAccessToken() { + return accessToken; + } + + public void setAccessToken(String accessToken) { + this.accessToken = accessToken; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/client/OrcidClient.java b/dspace-api/src/main/java/org/dspace/orcid/client/OrcidClient.java new file mode 100644 index 0000000000..99d1920aa5 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/client/OrcidClient.java @@ -0,0 +1,164 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.client; + +import java.util.List; +import java.util.Optional; + +import org.dspace.orcid.exception.OrcidClientException; +import org.dspace.orcid.model.OrcidTokenResponseDTO; +import 
org.orcid.jaxb.model.v3.release.record.Person; +import org.orcid.jaxb.model.v3.release.record.WorkBulk; +import org.orcid.jaxb.model.v3.release.record.summary.Works; + +/** + * Interface for classes that allow to contact ORCID. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidClient { + + /** + * Retrieves an /read-public access token using a client-credentials OAuth flow, + * or 2-step OAuth. + * + * @return the ORCID token + * @throws OrcidClientException if some error occurs during the exchange + */ + OrcidTokenResponseDTO getReadPublicAccessToken(); + + /** + * Exchange the authorization code for an ORCID iD and 3-legged access token. + * The authorization code expires upon use. + * + * @param code the authorization code + * @return the ORCID token + * @throws OrcidClientException if some error occurs during the exchange + */ + OrcidTokenResponseDTO getAccessToken(String code); + + /** + * Retrieves a summary of the ORCID person related to the given orcid. + * + * @param accessToken the access token + * @param orcid the orcid id of the record to retrieve + * @return the Person + * @throws OrcidClientException if some error occurs during the search + */ + Person getPerson(String accessToken, String orcid); + + /** + * Retrieves all the works related to the given orcid. + * + * @param accessToken the access token + * @param orcid the orcid id related to the works + * @return the Works + * @throws OrcidClientException if some error occurs during the search + */ + Works getWorks(String accessToken, String orcid); + + /** + * Retrieves all the works related to the given orcid. 
+ * + * @param orcid the orcid id related to the works + * @return the Works + * @throws OrcidClientException if some error occurs during the search + */ + Works getWorks(String orcid); + + /** + * Retrieves all the works with the given putCodes related to the given orcid + * + * @param accessToken the access token + * @param orcid the orcid id + * @param putCodes the putCodes of the works to retrieve + * @return the Works + * @throws OrcidClientException if some error occurs during the search + */ + WorkBulk getWorkBulk(String accessToken, String orcid, List putCodes); + + /** + * Retrieves all the works with the given putCodes related to the given orcid + * + * @param orcid the orcid id + * @param putCodes the putCodes of the works to retrieve + * @return the Works + * @throws OrcidClientException if some error occurs during the search + */ + WorkBulk getWorkBulk(String orcid, List putCodes); + + /** + * Retrieves an object from ORCID with the given putCode related to the given + * orcid. + * + * @param accessToken the access token + * @param orcid the orcid id + * @param putCode the object's put code + * @param clazz the object's class + * @return the Object, if any + * @throws OrcidClientException if some error occurs during the search + * @throws IllegalArgumentException if the given object class is not an valid + * ORCID object + */ + Optional getObject(String accessToken, String orcid, String putCode, Class clazz); + + /** + * Retrieves an object from ORCID with the given putCode related to the given + * orcid using the public API. + * + * @param orcid the orcid id + * @param putCode the object's put code + * @param clazz the object's class + * @return the Object, if any + * @throws OrcidClientException if some error occurs during the search + * @throws IllegalArgumentException if the given object class is not an valid + * ORCID object + */ + Optional getObject(String orcid, String putCode, Class clazz); + + /** + * Push the given object to ORCID. 
+ * + * @param accessToken the access token + * @param orcid the orcid id + * @param object the orcid object to push + * @return the orcid response if no error occurs + * @throws OrcidClientException if some error occurs during the push + * @throws IllegalArgumentException if the given object is not an valid ORCID + * object + */ + OrcidResponse push(String accessToken, String orcid, Object object); + + /** + * Update the object with the given putCode. + * + * @param accessToken the access token + * @param orcid the orcid id + * @param object the orcid object to push + * @param putCode the put code of the resource to delete + * @return the orcid response if no error occurs + * @throws OrcidClientException if some error occurs during the push + * @throws IllegalArgumentException if the given object is not an valid ORCID + * object + */ + OrcidResponse update(String accessToken, String orcid, Object object, String putCode); + + /** + * Delete the ORCID object with the given putCode on the given path. 
+ * + * @param accessToken the access token + * @param orcid the orcid id + * @param putCode the put code of the resource to delete + * @param path the path of the resource to delete + * @return the orcid response if no error occurs + * @throws OrcidClientException if some error occurs during the search + */ + OrcidResponse deleteByPutCode(String accessToken, String orcid, String putCode, String path); + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/client/OrcidClientImpl.java b/dspace-api/src/main/java/org/dspace/orcid/client/OrcidClientImpl.java new file mode 100644 index 0000000000..3e7ca7b210 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/client/OrcidClientImpl.java @@ -0,0 +1,394 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.client; + +import static org.apache.http.client.methods.RequestBuilder.delete; +import static org.apache.http.client.methods.RequestBuilder.get; +import static org.apache.http.client.methods.RequestBuilder.post; +import static org.apache.http.client.methods.RequestBuilder.put; + +import java.io.IOException; +import java.io.StringWriter; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import javax.xml.bind.JAXBContext; +import javax.xml.bind.JAXBException; +import javax.xml.bind.Marshaller; +import javax.xml.bind.Unmarshaller; +import javax.xml.stream.XMLInputFactory; +import javax.xml.stream.XMLStreamReader; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.io.IOUtils; +import org.apache.http.Header; +import org.apache.http.HttpEntity; +import org.apache.http.HttpResponse; +import org.apache.http.HttpStatus; +import 
org.apache.http.NameValuePair; +import org.apache.http.client.HttpClient; +import org.apache.http.client.entity.UrlEncodedFormEntity; +import org.apache.http.client.methods.HttpUriRequest; +import org.apache.http.client.methods.RequestBuilder; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.message.BasicNameValuePair; +import org.dspace.orcid.exception.OrcidClientException; +import org.dspace.orcid.model.OrcidEntityType; +import org.dspace.orcid.model.OrcidProfileSectionType; +import org.dspace.orcid.model.OrcidTokenResponseDTO; +import org.dspace.util.ThrowingSupplier; +import org.orcid.jaxb.model.v3.release.record.Address; +import org.orcid.jaxb.model.v3.release.record.Funding; +import org.orcid.jaxb.model.v3.release.record.Keyword; +import org.orcid.jaxb.model.v3.release.record.OtherName; +import org.orcid.jaxb.model.v3.release.record.Person; +import org.orcid.jaxb.model.v3.release.record.PersonExternalIdentifier; +import org.orcid.jaxb.model.v3.release.record.ResearcherUrl; +import org.orcid.jaxb.model.v3.release.record.Work; +import org.orcid.jaxb.model.v3.release.record.WorkBulk; +import org.orcid.jaxb.model.v3.release.record.summary.Works; + +/** + * Implementation of {@link OrcidClient}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidClientImpl implements OrcidClient { + + /** + * Mapping between ORCID JAXB models and the sub-paths on ORCID API. 
+ */ + private static final Map, String> PATHS_MAP = initializePathsMap(); + + private final OrcidConfiguration orcidConfiguration; + + private final ObjectMapper objectMapper; + + public OrcidClientImpl(OrcidConfiguration orcidConfiguration) { + this.orcidConfiguration = orcidConfiguration; + this.objectMapper = new ObjectMapper(); + } + + private static Map, String> initializePathsMap() { + Map, String> map = new HashMap, String>(); + map.put(Work.class, OrcidEntityType.PUBLICATION.getPath()); + map.put(Funding.class, OrcidEntityType.FUNDING.getPath()); + map.put(Address.class, OrcidProfileSectionType.COUNTRY.getPath()); + map.put(OtherName.class, OrcidProfileSectionType.OTHER_NAMES.getPath()); + map.put(ResearcherUrl.class, OrcidProfileSectionType.RESEARCHER_URLS.getPath()); + map.put(PersonExternalIdentifier.class, OrcidProfileSectionType.EXTERNAL_IDS.getPath()); + map.put(Keyword.class, OrcidProfileSectionType.KEYWORDS.getPath()); + return map; + } + + @Override + public OrcidTokenResponseDTO getAccessToken(String code) { + + List params = new ArrayList(); + params.add(new BasicNameValuePair("code", code)); + params.add(new BasicNameValuePair("grant_type", "authorization_code")); + params.add(new BasicNameValuePair("client_id", orcidConfiguration.getClientId())); + params.add(new BasicNameValuePair("client_secret", orcidConfiguration.getClientSecret())); + + HttpUriRequest httpUriRequest = RequestBuilder.post(orcidConfiguration.getTokenEndpointUrl()) + .addHeader("Content-Type", "application/x-www-form-urlencoded") + .addHeader("Accept", "application/json") + .setEntity(new UrlEncodedFormEntity(params, Charset.defaultCharset())) + .build(); + + return executeAndParseJson(httpUriRequest, OrcidTokenResponseDTO.class); + + } + + @Override + public Person getPerson(String accessToken, String orcid) { + HttpUriRequest httpUriRequest = buildGetUriRequest(accessToken, "/" + orcid + "/person"); + return executeAndUnmarshall(httpUriRequest, false, Person.class); + } + 
+ @Override + public Works getWorks(String accessToken, String orcid) { + HttpUriRequest httpUriRequest = buildGetUriRequest(accessToken, "/" + orcid + "/works"); + Works works = executeAndUnmarshall(httpUriRequest, true, Works.class); + return works != null ? works : new Works(); + } + + @Override + public Works getWorks(String orcid) { + HttpUriRequest httpUriRequest = buildGetUriRequestToPublicEndpoint("/" + orcid + "/works"); + Works works = executeAndUnmarshall(httpUriRequest, true, Works.class); + return works != null ? works : new Works(); + } + + @Override + public WorkBulk getWorkBulk(String accessToken, String orcid, List putCodes) { + String putCode = String.join(",", putCodes); + HttpUriRequest httpUriRequest = buildGetUriRequest(accessToken, "/" + orcid + "/works/" + putCode); + WorkBulk workBulk = executeAndUnmarshall(httpUriRequest, true, WorkBulk.class); + return workBulk != null ? workBulk : new WorkBulk(); + } + + @Override + public WorkBulk getWorkBulk(String orcid, List putCodes) { + String putCode = String.join(",", putCodes); + HttpUriRequest httpUriRequest = buildGetUriRequestToPublicEndpoint("/" + orcid + "/works/" + putCode); + WorkBulk workBulk = executeAndUnmarshall(httpUriRequest, true, WorkBulk.class); + return workBulk != null ? 
workBulk : new WorkBulk(); + } + + @Override + public Optional getObject(String accessToken, String orcid, String putCode, Class clazz) { + String path = getOrcidPathFromOrcidObjectType(clazz); + HttpUriRequest httpUriRequest = buildGetUriRequest(accessToken, "/" + orcid + path + "/" + putCode); + return Optional.ofNullable(executeAndUnmarshall(httpUriRequest, true, clazz)); + } + + @Override + public Optional getObject(String orcid, String putCode, Class clazz) { + String path = getOrcidPathFromOrcidObjectType(clazz); + HttpUriRequest httpUriRequest = buildGetUriRequestToPublicEndpoint("/" + orcid + path + "/" + putCode); + return Optional.ofNullable(executeAndUnmarshall(httpUriRequest, true, clazz)); + } + + @Override + public OrcidResponse push(String accessToken, String orcid, Object object) { + String path = getOrcidPathFromOrcidObjectType(object.getClass()); + return execute(buildPostUriRequest(accessToken, "/" + orcid + path, object), false); + } + + @Override + public OrcidResponse update(String accessToken, String orcid, Object object, String putCode) { + String path = getOrcidPathFromOrcidObjectType(object.getClass()); + return execute(buildPutUriRequest(accessToken, "/" + orcid + path + "/" + putCode, object), false); + } + + @Override + public OrcidResponse deleteByPutCode(String accessToken, String orcid, String putCode, String path) { + return execute(buildDeleteUriRequest(accessToken, "/" + orcid + path + "/" + putCode), true); + } + + @Override + public OrcidTokenResponseDTO getReadPublicAccessToken() { + return getClientCredentialsAccessToken("/read-public"); + } + + private OrcidTokenResponseDTO getClientCredentialsAccessToken(String scope) { + List params = new ArrayList(); + params.add(new BasicNameValuePair("scope", scope)); + params.add(new BasicNameValuePair("grant_type", "client_credentials")); + params.add(new BasicNameValuePair("client_id", orcidConfiguration.getClientId())); + params.add(new BasicNameValuePair("client_secret", 
orcidConfiguration.getClientSecret())); + + HttpUriRequest httpUriRequest = RequestBuilder.post(orcidConfiguration.getTokenEndpointUrl()) + .addHeader("Content-Type", "application/x-www-form-urlencoded") + .addHeader("Accept", "application/json") + .setEntity(new UrlEncodedFormEntity(params, Charset.defaultCharset())) + .build(); + + return executeAndParseJson(httpUriRequest, OrcidTokenResponseDTO.class); + } + + private HttpUriRequest buildGetUriRequest(String accessToken, String relativePath) { + return get(orcidConfiguration.getApiUrl() + relativePath.trim()) + .addHeader("Content-Type", "application/x-www-form-urlencoded") + .addHeader("Authorization", "Bearer " + accessToken) + .build(); + } + + private HttpUriRequest buildGetUriRequestToPublicEndpoint(String relativePath) { + return get(orcidConfiguration.getPublicUrl() + relativePath.trim()) + .addHeader("Content-Type", "application/x-www-form-urlencoded") + .build(); + } + + private HttpUriRequest buildPostUriRequest(String accessToken, String relativePath, Object object) { + return post(orcidConfiguration.getApiUrl() + relativePath.trim()) + .addHeader("Content-Type", "application/vnd.orcid+xml") + .addHeader("Authorization", "Bearer " + accessToken) + .setEntity(convertToEntity(object)) + .build(); + } + + private HttpUriRequest buildPutUriRequest(String accessToken, String relativePath, Object object) { + return put(orcidConfiguration.getApiUrl() + relativePath.trim()) + .addHeader("Content-Type", "application/vnd.orcid+xml") + .addHeader("Authorization", "Bearer " + accessToken) + .setEntity(convertToEntity(object)) + .build(); + } + + private HttpUriRequest buildDeleteUriRequest(String accessToken, String relativePath) { + return delete(orcidConfiguration.getApiUrl() + relativePath.trim()) + .addHeader("Authorization", "Bearer " + accessToken) + .build(); + } + + private T executeAndParseJson(HttpUriRequest httpUriRequest, Class clazz) { + + HttpClient client = HttpClientBuilder.create().build(); + + 
return executeAndReturns(() -> { + + HttpResponse response = client.execute(httpUriRequest); + + if (isNotSuccessfull(response)) { + throw new OrcidClientException(getStatusCode(response), formatErrorMessage(response)); + } + + return objectMapper.readValue(response.getEntity().getContent(), clazz); + + }); + + } + + /** + * Execute the given httpUriRequest, unmarshalling the content with the given + * class. + * @param httpUriRequest the http request to be executed + * @param handleNotFoundAsNull if true this method returns null if the response + * status is 404, if false throws an + * OrcidClientException + * @param clazz the class to be used for the content unmarshall + * @return the response body + * @throws OrcidClientException if the incoming response is not successfull + */ + private T executeAndUnmarshall(HttpUriRequest httpUriRequest, boolean handleNotFoundAsNull, Class clazz) { + + HttpClient client = HttpClientBuilder.create().build(); + + return executeAndReturns(() -> { + + HttpResponse response = client.execute(httpUriRequest); + + if (handleNotFoundAsNull && isNotFound(response)) { + return null; + } + + if (isNotSuccessfull(response)) { + throw new OrcidClientException(getStatusCode(response), formatErrorMessage(response)); + } + + return unmarshall(response.getEntity(), clazz); + + }); + } + + private OrcidResponse execute(HttpUriRequest httpUriRequest, boolean handleNotFoundAsNull) { + HttpClient client = HttpClientBuilder.create().build(); + + return executeAndReturns(() -> { + + HttpResponse response = client.execute(httpUriRequest); + + if (handleNotFoundAsNull && isNotFound(response)) { + return new OrcidResponse(getStatusCode(response), null, getContent(response)); + } + + if (isNotSuccessfull(response)) { + throw new OrcidClientException(getStatusCode(response), formatErrorMessage(response)); + } + + return new OrcidResponse(getStatusCode(response), getPutCode(response), getContent(response)); + + }); + } + + private T 
executeAndReturns(ThrowingSupplier supplier) { + try { + return supplier.get(); + } catch (OrcidClientException ex) { + throw ex; + } catch (Exception ex) { + throw new OrcidClientException(ex); + } + } + + private String marshall(Object object) throws JAXBException { + JAXBContext jaxbContext = JAXBContext.newInstance(object.getClass()); + Marshaller marshaller = jaxbContext.createMarshaller(); + marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE); + StringWriter stringWriter = new StringWriter(); + marshaller.marshal(object, stringWriter); + return stringWriter.toString(); + } + + @SuppressWarnings("unchecked") + private T unmarshall(HttpEntity entity, Class clazz) throws Exception { + JAXBContext jaxbContext = JAXBContext.newInstance(clazz); + XMLInputFactory xmlInputFactory = XMLInputFactory.newFactory(); + xmlInputFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false); + XMLStreamReader xmlStreamReader = xmlInputFactory.createXMLStreamReader(entity.getContent()); + Unmarshaller unmarshaller = jaxbContext.createUnmarshaller(); + return (T) unmarshaller.unmarshal(xmlStreamReader); + } + + private HttpEntity convertToEntity(Object object) { + try { + return new StringEntity(marshall(object), StandardCharsets.UTF_8); + } catch (JAXBException ex) { + throw new IllegalArgumentException("The given object cannot be sent to ORCID", ex); + } + } + + private String formatErrorMessage(HttpResponse response) { + try { + return IOUtils.toString(response.getEntity().getContent(), Charset.defaultCharset()); + } catch (UnsupportedOperationException | IOException e) { + return "Generic error"; + } + } + + private boolean isNotSuccessfull(HttpResponse response) { + int statusCode = getStatusCode(response); + return statusCode < 200 || statusCode > 299; + } + + private boolean isNotFound(HttpResponse response) { + return getStatusCode(response) == HttpStatus.SC_NOT_FOUND; + } + + private int getStatusCode(HttpResponse response) { + return 
response.getStatusLine().getStatusCode(); + } + + private String getOrcidPathFromOrcidObjectType(Class clazz) { + String path = PATHS_MAP.get(clazz); + if (path == null) { + throw new IllegalArgumentException("The given class is not an ORCID object's class: " + clazz); + } + return path; + } + + private String getContent(HttpResponse response) throws UnsupportedOperationException, IOException { + HttpEntity entity = response.getEntity(); + return entity != null ? IOUtils.toString(entity.getContent(), StandardCharsets.UTF_8.name()) : null; + } + + /** + * Returns the put code present in the given http response, if any. For more + * details about the put code see For more details see + * https://info.orcid.org/faq/what-is-a-put-code/ + * @param response the http response coming from ORCID + * @return the put code, if any + */ + private String getPutCode(HttpResponse response) { + Header[] headers = response.getHeaders("Location"); + if (headers.length == 0) { + return null; + } + String value = headers[0].getValue(); + return value.substring(value.lastIndexOf("/") + 1); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/client/OrcidConfiguration.java b/dspace-api/src/main/java/org/dspace/orcid/client/OrcidConfiguration.java new file mode 100644 index 0000000000..550b0215c4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/client/OrcidConfiguration.java @@ -0,0 +1,114 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.client; + +import org.apache.commons.lang3.StringUtils; + +/** + * A class that contains all the configurations related to ORCID. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public final class OrcidConfiguration { + + private String apiUrl; + + private String publicUrl; + + private String domainUrl; + + private String redirectUrl; + + private String clientId; + + private String clientSecret; + + private String tokenEndpointUrl; + + private String authorizeEndpointUrl; + + private String scopes; + + public String getApiUrl() { + return apiUrl; + } + + public void setApiUrl(String apiUrl) { + this.apiUrl = apiUrl; + } + + public String getDomainUrl() { + return domainUrl; + } + + public void setDomainUrl(String domainUrl) { + this.domainUrl = domainUrl; + } + + public String getRedirectUrl() { + return redirectUrl; + } + + public void setRedirectUrl(String redirectUrl) { + this.redirectUrl = redirectUrl; + } + + public String getClientId() { + return clientId; + } + + public void setClientId(String clientId) { + this.clientId = clientId; + } + + public String getClientSecret() { + return clientSecret; + } + + public void setClientSecret(String clientSecret) { + this.clientSecret = clientSecret; + } + + public String getTokenEndpointUrl() { + return tokenEndpointUrl; + } + + public void setTokenEndpointUrl(String tokenEndpointUrl) { + this.tokenEndpointUrl = tokenEndpointUrl; + } + + public String getAuthorizeEndpointUrl() { + return authorizeEndpointUrl; + } + + public void setAuthorizeEndpointUrl(String authorizeEndpointUrl) { + this.authorizeEndpointUrl = authorizeEndpointUrl; + } + + public void setScopes(String scopes) { + this.scopes = scopes; + } + + public String[] getScopes() { + return StringUtils.isNotBlank(scopes) ? 
StringUtils.split(scopes, ",") : new String[] {}; + } + + public String getPublicUrl() { + return publicUrl; + } + + public void setPublicUrl(String publicUrl) { + this.publicUrl = publicUrl; + } + + public boolean isApiConfigured() { + return !StringUtils.isAnyBlank(clientId, clientSecret); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/client/OrcidResponse.java b/dspace-api/src/main/java/org/dspace/orcid/client/OrcidResponse.java new file mode 100644 index 0000000000..ef0050cf20 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/client/OrcidResponse.java @@ -0,0 +1,56 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.client; + +import org.apache.http.HttpStatus; + +/** + * Model a successfully response incoming from ORCID using {@link OrcidClient}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public final class OrcidResponse { + + private final int status; + + private final String putCode; + + private final String content; + + /** + * Create an ORCID response instance with the specified HTTP status, putCode and + * content. 
+ * + * @param status the HTTP status incoming from ORCID + * @param putCode the identifier of the resource ORCID side + * @param content the response body content + */ + public OrcidResponse(int status, String putCode, String content) { + this.status = status; + this.putCode = putCode; + this.content = content; + } + + public int getStatus() { + return status; + } + + public String getPutCode() { + return putCode; + } + + public String getContent() { + return content; + } + + public boolean isNotFoundStatus() { + return status == HttpStatus.SC_NOT_FOUND; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/consumer/OrcidQueueConsumer.java b/dspace-api/src/main/java/org/dspace/orcid/consumer/OrcidQueueConsumer.java new file mode 100644 index 0000000000..d177e61607 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/consumer/OrcidQueueConsumer.java @@ -0,0 +1,358 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.consumer; + +import static java.util.Arrays.asList; +import static java.util.Comparator.comparing; +import static java.util.Comparator.naturalOrder; +import static java.util.Comparator.nullsFirst; +import static org.apache.commons.collections.CollectionUtils.isNotEmpty; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import java.util.UUID; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.Relationship; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.RelationshipService; 
+import org.dspace.core.Context; +import org.dspace.event.Consumer; +import org.dspace.event.Event; +import org.dspace.orcid.OrcidHistory; +import org.dspace.orcid.OrcidOperation; +import org.dspace.orcid.factory.OrcidServiceFactory; +import org.dspace.orcid.model.OrcidEntityType; +import org.dspace.orcid.model.factory.OrcidProfileSectionFactory; +import org.dspace.orcid.service.OrcidHistoryService; +import org.dspace.orcid.service.OrcidProfileSectionFactoryService; +import org.dspace.orcid.service.OrcidQueueService; +import org.dspace.orcid.service.OrcidSynchronizationService; +import org.dspace.orcid.service.OrcidTokenService; +import org.dspace.profile.OrcidProfileSyncPreference; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * The consumer to fill the ORCID queue. The addition to the queue is made for + * all archived items that meet one of these conditions: + *
+ * <ul>
+ * <li>are profiles already linked to orcid that have some modified sections to
+ * be synchronized (based on the preferences set by the user)</li>
+ * <li>are publications/fundings related to profile items linked to orcid (based
+ * on the preferences set by the user)</li>
+ * </ul>
    + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidQueueConsumer implements Consumer { + + private static final Logger LOGGER = LoggerFactory.getLogger(OrcidQueueConsumer.class); + + private OrcidQueueService orcidQueueService; + + private OrcidHistoryService orcidHistoryService; + + private OrcidTokenService orcidTokenService; + + private OrcidSynchronizationService orcidSynchronizationService; + + private ItemService itemService; + + private OrcidProfileSectionFactoryService profileSectionFactoryService; + + private ConfigurationService configurationService; + + private RelationshipService relationshipService; + + private List alreadyConsumedItems = new ArrayList<>(); + + @Override + public void initialize() throws Exception { + + OrcidServiceFactory orcidServiceFactory = OrcidServiceFactory.getInstance(); + + this.orcidQueueService = orcidServiceFactory.getOrcidQueueService(); + this.orcidHistoryService = orcidServiceFactory.getOrcidHistoryService(); + this.orcidSynchronizationService = orcidServiceFactory.getOrcidSynchronizationService(); + this.orcidTokenService = orcidServiceFactory.getOrcidTokenService(); + this.profileSectionFactoryService = orcidServiceFactory.getOrcidProfileSectionFactoryService(); + this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + this.relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); + + this.itemService = ContentServiceFactory.getInstance().getItemService(); + } + + @Override + public void consume(Context context, Event event) throws Exception { + + if (isOrcidSynchronizationDisabled()) { + return; + } + + DSpaceObject dso = event.getSubject(context); + if (!(dso instanceof Item)) { + return; + } + + Item item = (Item) dso; + if (!item.isArchived()) { + return; + } + + if (alreadyConsumedItems.contains(item.getID())) { + return; + } + + context.turnOffAuthorisationSystem(); + try { + consumeItem(context, item); 
+ } finally { + context.restoreAuthSystemState(); + } + + } + + /** + * Consume the item if it is a profile or an ORCID entity. + */ + private void consumeItem(Context context, Item item) throws SQLException { + + String entityType = itemService.getEntityTypeLabel(item); + if (entityType == null) { + return; + } + + if (OrcidEntityType.isValidEntityType(entityType)) { + consumeEntity(context, item); + } else if (entityType.equals(getProfileType())) { + consumeProfile(context, item); + } + + alreadyConsumedItems.add(item.getID()); + + } + + /** + * Search for all related items to the given entity and create a new ORCID queue + * record if one of this is a profile linked with ORCID and the entity item must + * be synchronized with ORCID. + */ + private void consumeEntity(Context context, Item entity) throws SQLException { + + List relatedItems = findAllRelatedItems(context, entity); + + for (Item relatedItem : relatedItems) { + + if (isNotProfileItem(relatedItem) || isNotLinkedToOrcid(context, relatedItem)) { + continue; + } + + if (shouldNotBeSynchronized(relatedItem, entity) || isAlreadyQueued(context, relatedItem, entity)) { + continue; + } + + orcidQueueService.create(context, relatedItem, entity); + + } + + } + + private List findAllRelatedItems(Context context, Item entity) throws SQLException { + return relationshipService.findByItem(context, entity).stream() + .map(relationship -> getRelatedItem(entity, relationship)) + .collect(Collectors.toList()); + } + + private Item getRelatedItem(Item item, Relationship relationship) { + return item.equals(relationship.getLeftItem()) ? relationship.getRightItem() : relationship.getLeftItem(); + } + + /** + * If the given profile item is linked with ORCID recalculate all the ORCID + * queue records of the configured profile sections that can be synchronized. 
+ */ + private void consumeProfile(Context context, Item item) throws SQLException { + + if (isNotLinkedToOrcid(context, item)) { + return; + } + + for (OrcidProfileSectionFactory factory : getAllProfileSectionFactories(item)) { + + String sectionType = factory.getProfileSectionType().name(); + + orcidQueueService.deleteByEntityAndRecordType(context, item, sectionType); + + if (isProfileSectionSynchronizationDisabled(context, item, factory)) { + continue; + } + + List signatures = factory.getMetadataSignatures(context, item); + List historyRecords = findSuccessfullyOrcidHistoryRecords(context, item, sectionType); + + createInsertionRecordForNewSignatures(context, item, historyRecords, factory, signatures); + createDeletionRecordForNoMorePresentSignatures(context, item, historyRecords, factory, signatures); + + } + + } + + private boolean isProfileSectionSynchronizationDisabled(Context context, + Item item, OrcidProfileSectionFactory factory) { + List preferences = this.orcidSynchronizationService.getProfilePreferences(item); + return !preferences.contains(factory.getSynchronizationPreference()); + } + + /** + * Add new INSERTION record in the ORCID queue based on the metadata signatures + * calculated from the current item state. + */ + private void createInsertionRecordForNewSignatures(Context context, Item item, List historyRecords, + OrcidProfileSectionFactory factory, List signatures) throws SQLException { + + String sectionType = factory.getProfileSectionType().name(); + + for (String signature : signatures) { + + if (isNotAlreadySynchronized(historyRecords, signature)) { + String description = factory.getDescription(context, item, signature); + orcidQueueService.createProfileInsertionRecord(context, item, description, sectionType, signature); + } + + } + + } + + /** + * Add new DELETION records in the ORCID queue for metadata signature presents + * in the ORCID history no more present in the metadata signatures calculated + * from the current item state. 
+ */ + private void createDeletionRecordForNoMorePresentSignatures(Context context, Item profile, + List historyRecords, OrcidProfileSectionFactory factory, List signatures) + throws SQLException { + + String sectionType = factory.getProfileSectionType().name(); + + for (OrcidHistory historyRecord : historyRecords) { + String storedSignature = historyRecord.getMetadata(); + String putCode = historyRecord.getPutCode(); + String description = historyRecord.getDescription(); + + if (signatures.contains(storedSignature) || isAlreadyDeleted(historyRecords, historyRecord)) { + continue; + } + + if (StringUtils.isBlank(putCode)) { + LOGGER.warn("The orcid history record with id {} should have a not blank put code", + historyRecord.getID()); + continue; + } + + orcidQueueService.createProfileDeletionRecord(context, profile, description, + sectionType, storedSignature, putCode); + } + + } + + private List findSuccessfullyOrcidHistoryRecords(Context context, Item item, + String sectionType) throws SQLException { + return orcidHistoryService.findSuccessfullyRecordsByEntityAndType(context, item, sectionType); + } + + private boolean isNotAlreadySynchronized(List records, String signature) { + return getLastOperation(records, signature) + .map(operation -> operation == OrcidOperation.DELETE) + .orElse(Boolean.TRUE); + } + + private boolean isAlreadyDeleted(List records, OrcidHistory historyRecord) { + + if (historyRecord.getOperation() == OrcidOperation.DELETE) { + return true; + } + + return findDeletedHistoryRecordsBySignature(records, historyRecord.getMetadata()) + .anyMatch(record -> record.getTimestamp().after(historyRecord.getTimestamp())); + } + + private Stream findDeletedHistoryRecordsBySignature(List records, String signature) { + return records.stream() + .filter(record -> signature.equals(record.getMetadata())) + .filter(record -> record.getOperation() == OrcidOperation.DELETE); + } + + private Optional getLastOperation(List records, String signature) { + return 
records.stream() + .filter(record -> signature.equals(record.getMetadata())) + .sorted(comparing(OrcidHistory::getTimestamp, nullsFirst(naturalOrder())).reversed()) + .map(OrcidHistory::getOperation) + .findFirst(); + } + + private boolean isAlreadyQueued(Context context, Item profileItem, Item entity) throws SQLException { + return isNotEmpty(orcidQueueService.findByProfileItemAndEntity(context, profileItem, entity)); + } + + private boolean isNotLinkedToOrcid(Context context, Item profileItemItem) { + return hasNotOrcidAccessToken(context, profileItemItem) + || getMetadataValue(profileItemItem, "person.identifier.orcid") == null; + } + + private boolean hasNotOrcidAccessToken(Context context, Item profileItemItem) { + return orcidTokenService.findByProfileItem(context, profileItemItem) == null; + } + + private boolean shouldNotBeSynchronized(Item profileItem, Item entity) { + return !orcidSynchronizationService.isSynchronizationAllowed(profileItem, entity); + } + + private boolean isNotProfileItem(Item profileItemItem) { + return !getProfileType().equals(itemService.getEntityTypeLabel(profileItemItem)); + } + + private String getMetadataValue(Item item, String metadataField) { + return itemService.getMetadataFirstValue(item, new MetadataFieldName(metadataField), Item.ANY); + } + + private List getAllProfileSectionFactories(Item item) { + return this.profileSectionFactoryService.findByPreferences(asList(OrcidProfileSyncPreference.values())); + } + + private String getProfileType() { + return configurationService.getProperty("researcher-profile.entity-type", "Person"); + } + + private boolean isOrcidSynchronizationDisabled() { + return !configurationService.getBooleanProperty("orcid.synchronization-enabled", true); + } + + @Override + public void end(Context context) throws Exception { + alreadyConsumedItems.clear(); + } + + @Override + public void finish(Context context) throws Exception { + // nothing to do + } + +} diff --git 
a/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidHistoryDAO.java b/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidHistoryDAO.java new file mode 100644 index 0000000000..9e82f3c51d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidHistoryDAO.java @@ -0,0 +1,76 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.dao; + +import java.sql.SQLException; +import java.util.List; +import java.util.UUID; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.core.GenericDAO; +import org.dspace.orcid.OrcidHistory; + +/** + * Database Access Object interface class for the OrcidHistory object. The + * implementation of this class is responsible for all database calls for the + * OrcidHistory object and is autowired by spring. This class should only be + * accessed from a single service and should never be exposed outside of the API + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidHistoryDAO extends GenericDAO { + + /** + * Find all the ORCID history records by the given profileItem and entity uuids. 
+ * + * @param context the DSpace context + * @param profileItemId the profileItem item uuid + * @param entityId the entity item uuid + * @return the records list + * @throws SQLException if an SQL error occurs + */ + List findByProfileItemAndEntity(Context context, UUID profileItemId, UUID entityId) + throws SQLException; + + /** + * Get the OrcidHistory records where the given item is the profileItem or the + * entity + * + * @param context DSpace context object + * @param item the item to search for + * @return the found OrcidHistory entities + * @throws SQLException if database error + */ + public List findByProfileItemOrEntity(Context context, Item item) throws SQLException; + + /** + * Find the OrcidHistory records related to the given entity item. + * + * @param context DSpace context object + * @param entity the entity item + * @return the found put codes + * @throws SQLException if database error + */ + List findByEntity(Context context, Item entity) throws SQLException; + + /** + * Find all the successfully Orcid history records with the given record type + * related to the given entity. An history record is considered successful if + * the status is between 200 and 300. 
+ * + * @param context DSpace context object + * @param entity the entity item + * @param recordType the record type + * @return the found orcid history records + * @throws SQLException if database error + */ + List findSuccessfullyRecordsByEntityAndType(Context context, Item entity, + String recordType) throws SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidQueueDAO.java b/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidQueueDAO.java new file mode 100644 index 0000000000..235443b150 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidQueueDAO.java @@ -0,0 +1,107 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.dao; + +import java.sql.SQLException; +import java.util.List; +import java.util.UUID; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.core.GenericDAO; +import org.dspace.orcid.OrcidQueue; + +/** + * Database Access Object interface class for the OrcidQueue object. The + * implementation of this class is responsible for all database calls for the + * OrcidQueue object and is autowired by spring. This class should only be + * accessed from a single service and should never be exposed outside of the API + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidQueueDAO extends GenericDAO { + + /** + * Get the orcid queue records by the profileItem id. 
+ * + * @param context DSpace context object + * @param profileItemId the profileItem item id + * @param limit limit + * @param offset offset + * @return the orcid queue records + * @throws SQLException if an SQL error occurs + */ + public List findByProfileItemId(Context context, UUID profileItemId, Integer limit, Integer offset) + throws SQLException; + + /** + * Count the orcid queue records with the same profileItemId. + * + * @param context DSpace context object + * @param profileItemId the profileItem item id + * @return the count result + * @throws SQLException if an SQL error occurs + */ + long countByProfileItemId(Context context, UUID profileItemId) throws SQLException; + + /** + * Returns all the orcid queue records with the given profileItem and entity + * items. + * + * @param context DSpace context object + * @param profileItem the profileItem item + * @param entity the entity item + * @return the found orcid queue records + * @throws SQLException + */ + public List findByProfileItemAndEntity(Context context, Item profileItem, Item entity) + throws SQLException; + + /** + * Get the OrcidQueue records where the given item is the profileItem OR the + * entity + * + * @param context DSpace context object + * @param item the item to search for + * @return the found OrcidHistory entities + * @throws SQLException if database error + */ + public List findByProfileItemOrEntity(Context context, Item item) throws SQLException; + + /** + * Find all the OrcidQueue records with the given entity and record type. + * + * @param context DSpace context object + * @param entity the entity item + * @param type the record type + * @throws SQLException if database error occurs + */ + public List findByEntityAndRecordType(Context context, Item entity, String type) throws SQLException; + + /** + * Find all the OrcidQueue records with the given profileItem and record type. 
+ * + * @param context DSpace context object + * @param profileItem the profileItem item + * @param type the record type + * @throws SQLException if database error occurs + */ + public List findByProfileItemAndRecordType(Context context, Item profileItem, String type) + throws SQLException; + + /** + * Get all the OrcidQueue records with attempts less than the given attempts. + * + * @param context DSpace context object + * @param attempts the maximum value of attempts + * @return the found OrcidQueue records + * @throws SQLException if database error + */ + public List findByAttemptsLessThan(Context context, int attempts) throws SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidTokenDAO.java b/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidTokenDAO.java new file mode 100644 index 0000000000..00ec3dd274 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidTokenDAO.java @@ -0,0 +1,45 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.dao; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.core.GenericDAO; +import org.dspace.eperson.EPerson; +import org.dspace.orcid.OrcidToken; + +/** + * Database Access Object interface class for the OrcidToken object. The + * implementation of this class is responsible for all database calls for the + * OrcidToken object and is autowired by spring. This class should only be + * accessed from a single service and should never be exposed outside of the API + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidTokenDAO extends GenericDAO { + + /** + * Find an OrcidToken by ePerson. 
+ * + * @param context the DSpace context + * @param ePerson the ePerson to search for + * @return the Orcid token, if any + */ + public OrcidToken findByEPerson(Context context, EPerson ePerson); + + /** + * Find an OrcidToken by profileItem. + * + * @param context the DSpace context + * @param profileItem the profile item to search for + * @return the Orcid token, if any + */ + public OrcidToken findByProfileItem(Context context, Item profileItem); + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidHistoryDAOImpl.java b/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidHistoryDAOImpl.java new file mode 100644 index 0000000000..0b2c7099ff --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidHistoryDAOImpl.java @@ -0,0 +1,64 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.dao.impl; + +import java.sql.SQLException; +import java.util.List; +import java.util.UUID; +import javax.persistence.Query; + +import org.dspace.content.Item; +import org.dspace.core.AbstractHibernateDAO; +import org.dspace.core.Context; +import org.dspace.orcid.OrcidHistory; +import org.dspace.orcid.dao.OrcidHistoryDAO; + +/** + * Implementation of {@link OrcidHistoryDAO}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@SuppressWarnings("unchecked") +public class OrcidHistoryDAOImpl extends AbstractHibernateDAO implements OrcidHistoryDAO { + + @Override + public List findByProfileItemAndEntity(Context context, UUID profileItemId, UUID entityId) + throws SQLException { + Query query = createQuery(context, + "FROM OrcidHistory WHERE profileItem.id = :profileItemId AND entity.id = :entityId "); + query.setParameter("profileItemId", profileItemId); + query.setParameter("entityId", entityId); + return query.getResultList(); + } + + @Override + public List findByProfileItemOrEntity(Context context, Item item) throws SQLException { + Query query = createQuery(context, "FROM OrcidHistory WHERE profileItem.id = :itemId OR entity.id = :itemId"); + query.setParameter("itemId", item.getID()); + return query.getResultList(); + } + + @Override + public List findByEntity(Context context, Item entity) throws SQLException { + Query query = createQuery(context, "FROM OrcidHistory WHERE entity.id = :entityId "); + query.setParameter("entityId", entity.getID()); + return query.getResultList(); + } + + @Override + public List findSuccessfullyRecordsByEntityAndType(Context context, Item entity, + String recordType) throws SQLException { + Query query = createQuery(context, "FROM OrcidHistory WHERE entity = :entity AND recordType = :type " + + "AND status BETWEEN 200 AND 300"); + query.setParameter("entity", entity); + query.setParameter("type", recordType); + return query.getResultList(); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidQueueDAOImpl.java b/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidQueueDAOImpl.java new file mode 100644 index 0000000000..2114b25357 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidQueueDAOImpl.java @@ -0,0 +1,90 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and 
NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.dao.impl; + +import java.sql.SQLException; +import java.util.List; +import java.util.UUID; +import javax.persistence.Query; + +import org.dspace.content.Item; +import org.dspace.core.AbstractHibernateDAO; +import org.dspace.core.Context; +import org.dspace.orcid.OrcidQueue; +import org.dspace.orcid.dao.OrcidQueueDAO; + +/** + * Implementation of {@link OrcidQueueDAO}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@SuppressWarnings("unchecked") +public class OrcidQueueDAOImpl extends AbstractHibernateDAO implements OrcidQueueDAO { + + @Override + public List findByProfileItemId(Context context, UUID profileItemId, Integer limit, Integer offset) + throws SQLException { + Query query = createQuery(context, "FROM OrcidQueue WHERE profileItem.id= :profileItemId"); + query.setParameter("profileItemId", profileItemId); + if (limit != null && limit.intValue() > 0) { + query.setMaxResults(limit); + } + query.setFirstResult(offset); + return query.getResultList(); + } + + @Override + public List findByProfileItemAndEntity(Context context, Item profileItem, Item entity) + throws SQLException { + Query query = createQuery(context, "FROM OrcidQueue WHERE profileItem = :profileItem AND entity = :entity"); + query.setParameter("profileItem", profileItem); + query.setParameter("entity", entity); + return query.getResultList(); + } + + @Override + public long countByProfileItemId(Context context, UUID profileItemId) throws SQLException { + Query query = createQuery(context, + "SELECT COUNT(queue) FROM OrcidQueue queue WHERE profileItem.id= :profileItemId"); + query.setParameter("profileItemId", profileItemId); + return (long) query.getSingleResult(); + } + + @Override + public List findByProfileItemOrEntity(Context context, Item item) throws SQLException { + Query query = createQuery(context, "FROM OrcidQueue 
WHERE profileItem.id= :itemId OR entity.id = :itemId"); + query.setParameter("itemId", item.getID()); + return query.getResultList(); + } + + @Override + public List findByEntityAndRecordType(Context context, Item entity, String type) throws SQLException { + Query query = createQuery(context, "FROM OrcidQueue WHERE entity = :entity AND recordType = :type"); + query.setParameter("entity", entity); + query.setParameter("type", type); + return query.getResultList(); + } + + @Override + public List findByProfileItemAndRecordType(Context context, Item profileItem, String type) + throws SQLException { + Query query = createQuery(context, "FROM OrcidQueue WHERE profileItem = :profileItem AND recordType = :type"); + query.setParameter("profileItem", profileItem); + query.setParameter("type", type); + return query.getResultList(); + } + + @Override + public List findByAttemptsLessThan(Context context, int attempts) throws SQLException { + Query query = createQuery(context, "FROM OrcidQueue WHERE attempts IS NULL OR attempts < :attempts"); + query.setParameter("attempts", attempts); + return query.getResultList(); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidTokenDAOImpl.java b/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidTokenDAOImpl.java new file mode 100644 index 0000000000..01b03fc354 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidTokenDAOImpl.java @@ -0,0 +1,50 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.dao.impl; + +import java.sql.SQLException; +import javax.persistence.Query; + +import org.dspace.content.Item; +import org.dspace.core.AbstractHibernateDAO; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.orcid.OrcidToken; +import 
org.dspace.orcid.dao.OrcidTokenDAO; + +/** + * Implementation of {@link OrcidTokenDAO}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidTokenDAOImpl extends AbstractHibernateDAO implements OrcidTokenDAO { + + @Override + public OrcidToken findByEPerson(Context context, EPerson ePerson) { + try { + Query query = createQuery(context, "FROM OrcidToken WHERE ePerson = :ePerson"); + query.setParameter("ePerson", ePerson); + return singleResult(query); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @Override + public OrcidToken findByProfileItem(Context context, Item profileItem) { + try { + Query query = createQuery(context, "FROM OrcidToken WHERE profileItem = :profileItem"); + query.setParameter("profileItem", profileItem); + return singleResult(query); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/exception/OrcidClientException.java b/dspace-api/src/main/java/org/dspace/orcid/exception/OrcidClientException.java new file mode 100644 index 0000000000..9e78ef07b0 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/exception/OrcidClientException.java @@ -0,0 +1,48 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.exception; + +/** + * Exception throwable from class that implements {@link OrcidClient} in case of + * error response from the ORCID registry. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidClientException extends RuntimeException { + + public static final String INVALID_GRANT_MESSAGE = "invalid_grant"; + + private static final long serialVersionUID = -7618061110212398216L; + + private int status = 0; + + public OrcidClientException(int status, String content) { + super(content); + this.status = status; + } + + public OrcidClientException(Throwable cause) { + super(cause); + } + + public int getStatus() { + return this.status; + } + + /** + * Returns true if the exception is related to an invalid grant error + * (authentication code non valid), false otherwise + * + * @return the check result + */ + public boolean isInvalidGrantException() { + return getMessage() != null && getMessage().contains(INVALID_GRANT_MESSAGE); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/exception/OrcidValidationException.java b/dspace-api/src/main/java/org/dspace/orcid/exception/OrcidValidationException.java new file mode 100644 index 0000000000..bb35789ab9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/exception/OrcidValidationException.java @@ -0,0 +1,52 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.exception; + +import java.util.List; +import java.util.stream.Collectors; + +import org.dspace.orcid.model.validator.OrcidValidationError; + +/** + * A Runtime exception that occurs when an ORCID object that must be send to + * ORCID is not valid. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidValidationException extends RuntimeException { + + private static final long serialVersionUID = 3377335341871311369L; + + private final List errors; + + public OrcidValidationException(OrcidValidationError error) { + this(List.of(error)); + } + + public OrcidValidationException(List errors) { + super("Errors occurs during ORCID object validation"); + this.errors = errors; + } + + public List getErrors() { + return errors; + } + + @Override + public String getMessage() { + return super.getMessage() + ". Error codes: " + formatErrors(); + } + + private String formatErrors() { + return errors.stream() + .map(error -> error.getCode()) + .collect(Collectors.joining(",")); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/factory/OrcidServiceFactory.java b/dspace-api/src/main/java/org/dspace/orcid/factory/OrcidServiceFactory.java new file mode 100644 index 0000000000..09f43229d6 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/factory/OrcidServiceFactory.java @@ -0,0 +1,54 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.factory; + +import org.dspace.orcid.client.OrcidClient; +import org.dspace.orcid.client.OrcidConfiguration; +import org.dspace.orcid.service.MetadataSignatureGenerator; +import org.dspace.orcid.service.OrcidEntityFactoryService; +import org.dspace.orcid.service.OrcidHistoryService; +import org.dspace.orcid.service.OrcidProfileSectionFactoryService; +import org.dspace.orcid.service.OrcidQueueService; +import org.dspace.orcid.service.OrcidSynchronizationService; +import org.dspace.orcid.service.OrcidTokenService; +import org.dspace.services.factory.DSpaceServicesFactory; + +/** + * Abstract factory to get services for the orcid package, 
use + * OrcidHistoryServiceFactory.getInstance() to retrieve an implementation. + * + * @author Luca Giamminonni (luca.giamminonni at 4Science.it) + * + */ +public abstract class OrcidServiceFactory { + + public abstract OrcidHistoryService getOrcidHistoryService(); + + public abstract OrcidQueueService getOrcidQueueService(); + + public abstract OrcidSynchronizationService getOrcidSynchronizationService(); + + public abstract OrcidTokenService getOrcidTokenService(); + + public abstract OrcidProfileSectionFactoryService getOrcidProfileSectionFactoryService(); + + public abstract MetadataSignatureGenerator getMetadataSignatureGenerator(); + + public abstract OrcidEntityFactoryService getOrcidEntityFactoryService(); + + public abstract OrcidClient getOrcidClient(); + + public abstract OrcidConfiguration getOrcidConfiguration(); + + public static OrcidServiceFactory getInstance() { + return DSpaceServicesFactory.getInstance().getServiceManager().getServiceByName( + "orcidServiceFactory", OrcidServiceFactory.class); + } + + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/factory/OrcidServiceFactoryImpl.java b/dspace-api/src/main/java/org/dspace/orcid/factory/OrcidServiceFactoryImpl.java new file mode 100644 index 0000000000..78972eba85 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/factory/OrcidServiceFactoryImpl.java @@ -0,0 +1,105 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.factory; + +import org.dspace.orcid.client.OrcidClient; +import org.dspace.orcid.client.OrcidConfiguration; +import org.dspace.orcid.service.MetadataSignatureGenerator; +import org.dspace.orcid.service.OrcidEntityFactoryService; +import org.dspace.orcid.service.OrcidHistoryService; +import org.dspace.orcid.service.OrcidProfileSectionFactoryService; +import 
org.dspace.orcid.service.OrcidQueueService; +import org.dspace.orcid.service.OrcidSynchronizationService; +import org.dspace.orcid.service.OrcidTokenService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link OrcidServiceFactory}. + * + * @author Luca Giamminonni (luca.giamminonni at 4Science.it) + * + */ +public class OrcidServiceFactoryImpl extends OrcidServiceFactory { + + @Autowired + private OrcidHistoryService orcidHistoryService; + + @Autowired + private OrcidSynchronizationService orcidSynchronizationService; + + @Autowired + private OrcidQueueService orcidQueueService; + + @Autowired + private OrcidProfileSectionFactoryService orcidProfileSectionFactoryService; + + @Autowired + private OrcidEntityFactoryService orcidEntityFactoryService; + + @Autowired + private MetadataSignatureGenerator metadataSignatureGenerator; + + @Autowired + private OrcidClient orcidClient; + + @Autowired + private OrcidConfiguration orcidConfiguration; + + @Autowired + private OrcidTokenService orcidTokenService; + + @Override + public OrcidHistoryService getOrcidHistoryService() { + return orcidHistoryService; + } + + @Override + public OrcidQueueService getOrcidQueueService() { + return orcidQueueService; + } + + @Override + public OrcidSynchronizationService getOrcidSynchronizationService() { + return orcidSynchronizationService; + } + + @Override + public OrcidProfileSectionFactoryService getOrcidProfileSectionFactoryService() { + return orcidProfileSectionFactoryService; + } + + @Override + public MetadataSignatureGenerator getMetadataSignatureGenerator() { + return metadataSignatureGenerator; + } + + @Override + public OrcidEntityFactoryService getOrcidEntityFactoryService() { + return orcidEntityFactoryService; + } + + @Override + public OrcidTokenService getOrcidTokenService() { + return orcidTokenService; + } + + @Override + public OrcidClient getOrcidClient() { + return orcidClient; + } + + @Override + public 
OrcidConfiguration getOrcidConfiguration() { + return orcidConfiguration; + } + + public void setOrcidClient(OrcidClient orcidClient) { + this.orcidClient = orcidClient; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/OrcidEntityType.java b/dspace-api/src/main/java/org/dspace/orcid/model/OrcidEntityType.java new file mode 100644 index 0000000000..6b32818f76 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/OrcidEntityType.java @@ -0,0 +1,75 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model; + +import java.util.Arrays; + +/** + * The types of activities defined on ORCID that can be synchronized. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public enum OrcidEntityType { + + /** + * The ORCID publication/work activity. + */ + PUBLICATION("Publication", "/work"), + + /** + * The ORCID funding activity. + */ + FUNDING("Project", "/funding"); + + /** + * The DSpace entity type. + */ + private final String entityType; + + /** + * The subpath of the activity on ORCID API. + */ + private final String path; + + private OrcidEntityType(String entityType, String path) { + this.entityType = entityType; + this.path = path; + } + + public String getEntityType() { + return entityType; + } + + public String getPath() { + return path; + } + + /** + * Check if the given DSpace entity type is valid. + * @param entityType the entity type to check + * @return true if valid, false otherwise + */ + public static boolean isValidEntityType(String entityType) { + return Arrays.stream(OrcidEntityType.values()) + .anyMatch(orcidEntityType -> orcidEntityType.getEntityType().equalsIgnoreCase(entityType)); + } + + /** + * Returns an ORCID entity type from a DSpace entity type. 
/**
 * The types of activities defined on ORCID that can be synchronized, each
 * bound to the DSpace entity type it maps and to the sub-path of the activity
 * on the ORCID API.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public enum OrcidEntityType {

    /**
     * The ORCID publication/work activity.
     */
    PUBLICATION("Publication", "/work"),

    /**
     * The ORCID funding activity.
     */
    FUNDING("Project", "/funding");

    // The DSpace entity type mapped to this activity.
    private final String entityType;

    // The sub-path of the activity on the ORCID API.
    private final String path;

    OrcidEntityType(String entityType, String path) {
        this.entityType = entityType;
        this.path = path;
    }

    public String getEntityType() {
        return entityType;
    }

    public String getPath() {
        return path;
    }

    /**
     * Check if the given DSpace entity type maps to an ORCID activity.
     *
     * @param entityType the entity type to check (compared case-insensitively)
     * @return true if valid, false otherwise
     */
    public static boolean isValidEntityType(String entityType) {
        return fromEntityType(entityType) != null;
    }

    /**
     * Returns an ORCID entity type from a DSpace entity type.
     *
     * @param entityType the DSpace entity type to search for (case-insensitive)
     * @return the ORCID entity type, or null if no type matches
     */
    public static OrcidEntityType fromEntityType(String entityType) {
        return Arrays.stream(values())
            .filter(value -> value.entityType.equalsIgnoreCase(entityType))
            .findFirst()
            .orElse(null);
    }
}
+ */ + private String titleField; + + /** + * The metadata field related to the funding type. + */ + private String typeField; + + /** + * The funding type converter. + */ + private SimpleMapConverter typeConverter; + + /** + * The metadata field related to the funding amount. + */ + private String amountField; + + /** + * The metadata field related to the funding amount's currency. + */ + private String amountCurrencyField; + + /** + * The funding amount's currency converter. + */ + private SimpleMapConverter amountCurrencyConverter; + + /** + * The metadata field related to the funding start date. + */ + private String startDateField; + + /** + * The metadata field related to the funding end date. + */ + private String endDateField; + + /** + * The metadata field related to the funding description. + */ + private String descriptionField; + + /** + * The type of the relationship between the funding and the organization. + */ + private String organizationRelationshipType; + + private Map parseContributors(String contributors) { + Map contributorsMap = parseConfigurations(contributors); + return contributorsMap.keySet().stream() + .collect(toMap(identity(), field -> parseContributorRole(contributorsMap.get(field)))); + } + + private FundingContributorRole parseContributorRole(String contributorRole) { + try { + return FundingContributorRole.fromValue(contributorRole); + } catch (IllegalArgumentException ex) { + throw new IllegalArgumentException("The funding contributor role " + contributorRole + + " is invalid, allowed values are " + getAllowedContributorRoles(), ex); + } + } + + private List getAllowedContributorRoles() { + return Arrays.asList(FundingContributorRole.values()).stream() + .map(FundingContributorRole::value) + .collect(Collectors.toList()); + } + + public Map getExternalIdentifierFields() { + return externalIdentifierFields; + } + + public void setExternalIdentifierFields(String externalIdentifierFields) { + this.externalIdentifierFields = 
OrcidFactoryUtils.parseConfigurations(externalIdentifierFields); + } + + public Map getContributorFields() { + return contributorFields; + } + + public void setContributorFields(String contributorFields) { + this.contributorFields = parseContributors(contributorFields); + } + + public String getTitleField() { + return titleField; + } + + public void setTitleField(String titleField) { + this.titleField = titleField; + } + + public String getStartDateField() { + return startDateField; + } + + public void setStartDateField(String startDateField) { + this.startDateField = startDateField; + } + + public String getEndDateField() { + return endDateField; + } + + public void setEndDateField(String endDateField) { + this.endDateField = endDateField; + } + + public String getDescriptionField() { + return descriptionField; + } + + public void setDescriptionField(String descriptionField) { + this.descriptionField = descriptionField; + } + + public String getOrganizationRelationshipType() { + return organizationRelationshipType; + } + + public void setOrganizationRelationshipType(String organizationRelationshipType) { + this.organizationRelationshipType = organizationRelationshipType; + } + + public String getTypeField() { + return typeField; + } + + public void setTypeField(String typeField) { + this.typeField = typeField; + } + + public String getAmountField() { + return amountField; + } + + public void setAmountField(String amountField) { + this.amountField = amountField; + } + + public String getAmountCurrencyField() { + return amountCurrencyField; + } + + public void setAmountCurrencyField(String amountCurrencyField) { + this.amountCurrencyField = amountCurrencyField; + } + + public String convertAmountCurrency(String currency) { + return amountCurrencyConverter != null ? 
amountCurrencyConverter.getValue(currency) : currency; + } + + public void setAmountCurrencyConverter(SimpleMapConverter amountCurrencyConverter) { + this.amountCurrencyConverter = amountCurrencyConverter; + } + + public String convertType(String type) { + return typeConverter != null ? typeConverter.getValue(type) : type; + } + + public void setTypeConverter(SimpleMapConverter typeConverter) { + this.typeConverter = typeConverter; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/OrcidProfileSectionType.java b/dspace-api/src/main/java/org/dspace/orcid/model/OrcidProfileSectionType.java new file mode 100644 index 0000000000..7521844d2d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/OrcidProfileSectionType.java @@ -0,0 +1,46 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model; + +import org.apache.commons.lang3.EnumUtils; + +/** + * Enum that model all the ORCID profile sections that could be synchronized. + * These fields come from the ORCID PERSON schema, see + * https://info.orcid.org/documentation/integration-guide/orcid-record/#PERSON + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public enum OrcidProfileSectionType { + + OTHER_NAMES("/other-names"), + COUNTRY("/address"), + KEYWORDS("/keywords"), + EXTERNAL_IDS("/external-identifiers"), + RESEARCHER_URLS("/researcher-urls"); + + private final String path; + + private OrcidProfileSectionType(String path) { + this.path = path; + } + + public String getPath() { + return path; + } + + public static boolean isValid(String type) { + return type != null ? EnumUtils.isValidEnum(OrcidProfileSectionType.class, type.toUpperCase()) : false; + } + + public static OrcidProfileSectionType fromString(String type) { + return isValid(type) ? 
OrcidProfileSectionType.valueOf(type.toUpperCase()) : null; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/OrcidTokenResponseDTO.java b/dspace-api/src/main/java/org/dspace/orcid/model/OrcidTokenResponseDTO.java new file mode 100644 index 0000000000..6b3594f9b8 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/OrcidTokenResponseDTO.java @@ -0,0 +1,135 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import org.apache.commons.lang3.StringUtils; + +/** + * This class map the response from and ORCID token endpoint. + * + * Response example: + * + * { + * "access_token":"f5af9f51-07e6-4332-8f1a-c0c11c1e3728", + * "token_type":"bearer", + * "refresh_token":"f725f747-3a65-49f6-a231-3e8944ce464d", + * "expires_in":631138518, + * "scope":"/read-limited", + * "name":"Sofia Garcia", + * "orcid":"0000-0001-2345-6789" + * } + * + * @author Luca Giamminonni (luca.giamminonni at 4Science.it) + * + */ +public class OrcidTokenResponseDTO { + + /** + * The access token release by the authorization server this is the most + * relevant item, because it allow the server to access to the user resources as + * defined in the scopes. + */ + @JsonProperty("access_token") + private String accessToken; + + /** + * The refresh token as defined in the OAuth standard. + */ + @JsonProperty("refresh_token") + private String refreshToken; + + /** + * It will be "bearer". + */ + @JsonProperty("token_type") + private String tokenType; + + /** + * The expiration timestamp in millis. + */ + @JsonProperty("expires_in") + private int expiresIn; + + /** + * List of scopes. + */ + private String scope; + + /** + * The ORCID user name. 
+ */ + private String name; + + /** + * The ORCID user id. + */ + private String orcid; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getOrcid() { + return orcid; + } + + public void setOrcid(String orcid) { + this.orcid = orcid; + } + + public String getAccessToken() { + return accessToken; + } + + public void setAccessToken(String accessToken) { + this.accessToken = accessToken; + } + + public String getRefreshToken() { + return refreshToken; + } + + public void setRefreshToken(String refreshToken) { + this.refreshToken = refreshToken; + } + + public String getTokenType() { + return tokenType; + } + + public void setTokenType(String tokenType) { + this.tokenType = tokenType; + } + + public int getExpiresIn() { + return expiresIn; + } + + public void setExpiresIn(int expiresIn) { + this.expiresIn = expiresIn; + } + + public String getScope() { + return scope; + } + + public void setScope(String scope) { + this.scope = scope; + } + + @JsonIgnore + public String[] getScopeAsArray() { + return StringUtils.isEmpty(getScope()) ? 
new String[] {} : getScope().split(" "); + } +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/OrcidWorkFieldMapping.java b/dspace-api/src/main/java/org/dspace/orcid/model/OrcidWorkFieldMapping.java new file mode 100644 index 0000000000..781a9dcbd9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/OrcidWorkFieldMapping.java @@ -0,0 +1,197 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model; + +import static java.util.function.Function.identity; +import static java.util.stream.Collectors.toMap; +import static org.dspace.orcid.model.factory.OrcidFactoryUtils.parseConfigurations; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import org.dspace.util.SimpleMapConverter; +import org.orcid.jaxb.model.common.ContributorRole; +import org.orcid.jaxb.model.v3.release.record.Work; + +/** + * Class that contains all the mapping between {@link Work} and DSpace metadata + * fields. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidWorkFieldMapping { + + /** + * The metadata fields related to the work contributors. + */ + private Map contributorFields = new HashMap<>(); + + /** + * The metadata fields related to the work external identifiers. + */ + private Map externalIdentifierFields = new HashMap<>(); + + /** + * The metadata field related to the work publication date. + */ + private String publicationDateField; + + /** + * The metadata field related to the work title. + */ + private String titleField; + + /** + * The metadata field related to the work type. + */ + private String typeField; + + /** + * The metadata field related to the work journal title. 
+ */ + private String journalTitleField; + + /** + * The metadata field related to the work description. + */ + private String shortDescriptionField; + + /** + * The metadata field related to the work language. + */ + private String languageField; + + /** + * The metadata field related to the work sub title. + */ + private String subTitleField; + + /** + * The work type converter. + */ + private SimpleMapConverter typeConverter; + + /** + * The work language converter. + */ + private SimpleMapConverter languageConverter; + + public String convertType(String type) { + return typeConverter != null ? typeConverter.getValue(type) : type; + } + + public String convertLanguage(String language) { + return languageConverter != null ? languageConverter.getValue(language) : language; + } + + public String getTitleField() { + return titleField; + } + + public void setTitleField(String titleField) { + this.titleField = titleField; + } + + public String getTypeField() { + return typeField; + } + + public void setTypeField(String typeField) { + this.typeField = typeField; + } + + public void setTypeConverter(SimpleMapConverter typeConverter) { + this.typeConverter = typeConverter; + } + + public Map getContributorFields() { + return contributorFields; + } + + public void setContributorFields(String contributorFields) { + this.contributorFields = parseContributors(contributorFields); + } + + public Map getExternalIdentifierFields() { + return externalIdentifierFields; + } + + public void setExternalIdentifierFields(String externalIdentifierFields) { + this.externalIdentifierFields = parseConfigurations(externalIdentifierFields); + } + + public String getPublicationDateField() { + return publicationDateField; + } + + public void setPublicationDateField(String publicationDateField) { + this.publicationDateField = publicationDateField; + } + + public String getJournalTitleField() { + return journalTitleField; + } + + public void setJournalTitleField(String journalTitleField) { + 
this.journalTitleField = journalTitleField; + } + + public String getShortDescriptionField() { + return shortDescriptionField; + } + + public void setShortDescriptionField(String shortDescriptionField) { + this.shortDescriptionField = shortDescriptionField; + } + + public String getLanguageField() { + return languageField; + } + + public void setLanguageField(String languageField) { + this.languageField = languageField; + } + + public void setLanguageConverter(SimpleMapConverter languageConverter) { + this.languageConverter = languageConverter; + } + + public String getSubTitleField() { + return subTitleField; + } + + public void setSubTitleField(String subTitleField) { + this.subTitleField = subTitleField; + } + + private Map parseContributors(String contributors) { + Map contributorsMap = parseConfigurations(contributors); + return contributorsMap.keySet().stream() + .collect(toMap(identity(), field -> parseContributorRole(contributorsMap.get(field)))); + } + + private ContributorRole parseContributorRole(String contributorRole) { + try { + return ContributorRole.fromValue(contributorRole); + } catch (IllegalArgumentException ex) { + throw new IllegalArgumentException("The contributor role " + contributorRole + + " is invalid, allowed values are " + getAllowedContributorRoles(), ex); + } + } + + private List getAllowedContributorRoles() { + return Arrays.asList(ContributorRole.values()).stream() + .map(ContributorRole::value) + .collect(Collectors.toList()); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidCommonObjectFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidCommonObjectFactory.java new file mode 100644 index 0000000000..4ca36c2169 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidCommonObjectFactory.java @@ -0,0 +1,93 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the 
source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.factory; + +import java.util.Optional; + +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.core.Context; +import org.dspace.orcid.exception.OrcidValidationException; +import org.orcid.jaxb.model.common.ContributorRole; +import org.orcid.jaxb.model.common.FundingContributorRole; +import org.orcid.jaxb.model.v3.release.common.Contributor; +import org.orcid.jaxb.model.v3.release.common.Country; +import org.orcid.jaxb.model.v3.release.common.FuzzyDate; +import org.orcid.jaxb.model.v3.release.common.Organization; +import org.orcid.jaxb.model.v3.release.common.Url; +import org.orcid.jaxb.model.v3.release.record.FundingContributor; + +/** + * Interface for factory classes that creates common ORCID objects. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidCommonObjectFactory { + + /** + * Creates an instance of {@link FuzzyDate} if the given metadata value + * represent a date with a supported format. + * + * @param metadataValue the metadata value + * @return the FuzzyDate istance, if any + */ + public Optional createFuzzyDate(MetadataValue metadataValue); + + /** + * Creates an instance of {@link Organization} from the given orgUnit item. + * + * @param context the DSpace context + * @param orgUnit the orgUnit item + * @return the created Organization's instance, if any + */ + public Optional createOrganization(Context context, Item orgUnit); + + /** + * Creates an instance of {@link Contributor} from the given metadata value. 
+ * + * @param context the DSpace context + * @param metadataValue the metadata value + * @param role the contributor role + * @return the created Contributor instance, if any + */ + public Optional createContributor(Context context, MetadataValue metadataValue, ContributorRole role); + + /** + * Creates an instance of {@link FundingContributor} from the given metadata + * value. + * + * @param context the DSpace context + * @param metadataValue the metadata value + * @param role the contributor role + * @return the created FundingContributor instance, if any + */ + public Optional createFundingContributor(Context context, MetadataValue metadataValue, + FundingContributorRole role); + + /** + * Creates an instance of {@link Url} from the given item. + * @param context the DSpace context + * @param item the item + * @return the created Url instance, if any + */ + public Optional createUrl(Context context, Item item); + + /** + * Creates an instance of {@link Country} from the given metadata value. 
+ * + * @param context the DSpace context + * @param metadataValue the metadata value + * @return the created Country instance, if any + * @throws OrcidValidationException if the given metadata value is not a valid + * ISO 3611 country + */ + public Optional createCountry(Context context, MetadataValue metadataValue) + throws OrcidValidationException; + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidEntityFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidEntityFactory.java new file mode 100644 index 0000000000..3fbad15911 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidEntityFactory.java @@ -0,0 +1,43 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.factory; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.orcid.model.OrcidEntityType; +import org.orcid.jaxb.model.v3.release.record.Activity; + +/** + * Interface to mark factories of Orcid entities. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidEntityFactory { + + /** + * Placeholder used to refer the item handle on fields mapping. + */ + String SIMPLE_HANDLE_PLACEHOLDER = "$simple-handle"; + + /** + * Returns the entity type created from this factory. + * + * @return the entity type + */ + public OrcidEntityType getEntityType(); + + /** + * Creates an ORCID activity from the given object. 
+ * + * @param context the DSpace context + * @param item the item + * @return the created activity instance + */ + public Activity createOrcidObject(Context context, Item item); +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidFactoryUtils.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidFactoryUtils.java new file mode 100644 index 0000000000..4b8c1178ef --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidFactoryUtils.java @@ -0,0 +1,68 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.factory; + +import java.util.HashMap; +import java.util.Map; + +import org.apache.commons.lang3.StringUtils; + +/** + * Utility class for Orcid factory classes. This is used to parse the + * configuration of ORCID entities defined in orcid.cfg (for example see + * contributors and external ids configuration). + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public final class OrcidFactoryUtils { + + private OrcidFactoryUtils() { + + } + + /** + * Parse the given configurations value and returns a map with metadata fields + * as keys and types/sources as values. The expected configuration syntax is a + * list of values field::type separated by commas. 
+ * + * @param configurations the configurations to parse + * @return the configurations parsing result as map + */ + public static Map parseConfigurations(String configurations) { + Map configurationMap = new HashMap(); + if (StringUtils.isBlank(configurations)) { + return configurationMap; + } + + for (String configuration : configurations.split(",")) { + String[] configurationSections = parseConfiguration(configuration); + configurationMap.put(configurationSections[0], configurationSections[1]); + } + + return configurationMap; + } + + /** + * Parse the given configuration value and returns it's section. The expected + * configuration syntax is field::type. + * + * @param configuration the configuration to parse + * @return the configuration sections + * @throws IllegalStateException if the given configuration is not valid + */ + private static String[] parseConfiguration(String configuration) { + String[] configurations = configuration.split("::"); + if (configurations.length != 2) { + throw new IllegalStateException( + "The configuration '" + configuration + "' is not valid. 
Expected field::type"); + } + return configurations; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidProfileSectionFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidProfileSectionFactory.java new file mode 100644 index 0000000000..731b6f84a3 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidProfileSectionFactory.java @@ -0,0 +1,78 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.factory; + +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.core.Context; +import org.dspace.orcid.model.OrcidProfileSectionType; +import org.dspace.profile.OrcidProfileSyncPreference; + +/** + * Interface for classes that creates ORCID profile section object. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidProfileSectionFactory { + + /** + * Creates an instance of an ORCID object starting from the metadata values + * + * @param context the DSpace Context + * @param metadataValues the metadata values + * @return the ORCID object + */ + public Object create(Context context, List metadataValues); + + /** + * Returns the profile section type related to this factory. + * + * @return the profile section type + */ + public OrcidProfileSectionType getProfileSectionType(); + + /** + * Returns the profile synchronization preference related to this factory. + * + * @return the synchronization preference + */ + public OrcidProfileSyncPreference getSynchronizationPreference(); + + /** + * Returns all the metadata fields involved in the profile section + * configuration. 
+ * + * @return the metadataFields + */ + public List getMetadataFields(); + + /** + * Given the input item's metadata values generate a metadata signature for each + * metadata field groups handled by this factory or for each metadata fields if + * the factory is configured with single metadata fields. + * + * @param context the DSpace context + * @param item the item + * @return the metadata signatures + */ + public List getMetadataSignatures(Context context, Item item); + + /** + * Returns a description of the item's metadata values related to the given + * signature. + * + * @param context the DSpace context + * @param item the item + * @param signature the metadata signature + * @return the metadata values description + */ + public String getDescription(Context context, Item item, String signature); +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/AbstractOrcidProfileSectionFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/AbstractOrcidProfileSectionFactory.java new file mode 100644 index 0000000000..2c272e620c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/AbstractOrcidProfileSectionFactory.java @@ -0,0 +1,73 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.factory.impl; + +import static java.lang.String.format; + +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.ItemService; +import org.dspace.orcid.model.OrcidProfileSectionType; +import org.dspace.orcid.model.factory.OrcidCommonObjectFactory; +import org.dspace.orcid.model.factory.OrcidProfileSectionFactory; +import org.dspace.orcid.service.MetadataSignatureGenerator; +import org.dspace.profile.OrcidProfileSyncPreference; +import 
org.springframework.beans.factory.annotation.Autowired; + +/** + * Abstract class for that handle commons behaviors of all the available orcid + * profile section factories. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public abstract class AbstractOrcidProfileSectionFactory implements OrcidProfileSectionFactory { + + protected final OrcidProfileSectionType sectionType; + + protected final OrcidProfileSyncPreference preference; + + @Autowired + protected ItemService itemService; + + @Autowired + protected OrcidCommonObjectFactory orcidCommonObjectFactory; + + @Autowired + protected MetadataSignatureGenerator metadataSignatureGenerator; + + public AbstractOrcidProfileSectionFactory(OrcidProfileSectionType sectionType, + OrcidProfileSyncPreference preference) { + this.sectionType = sectionType; + this.preference = preference; + + if (!getSupportedTypes().contains(sectionType)) { + throw new IllegalArgumentException(format("The ORCID configuration does not support " + + "the section type %s. 
Supported types are %s", sectionType, getSupportedTypes())); + } + } + + protected abstract List getSupportedTypes(); + + @Override + public OrcidProfileSectionType getProfileSectionType() { + return sectionType; + } + + @Override + public OrcidProfileSyncPreference getSynchronizationPreference() { + return preference; + } + + protected List getMetadataValues(Item item, String metadataField) { + return itemService.getMetadataByMetadataString(item, metadataField); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidCommonObjectFactoryImpl.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidCommonObjectFactoryImpl.java new file mode 100644 index 0000000000..2f47aa53d6 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidCommonObjectFactoryImpl.java @@ -0,0 +1,308 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.factory.impl; + +import static java.util.Optional.empty; +import static java.util.Optional.of; +import static java.util.Optional.ofNullable; +import static org.apache.commons.lang3.EnumUtils.isValidEnum; +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.dspace.orcid.model.factory.OrcidFactoryUtils.parseConfigurations; +import static org.orcid.jaxb.model.common.SequenceType.ADDITIONAL; +import static org.orcid.jaxb.model.common.SequenceType.FIRST; + +import java.time.LocalDate; +import java.time.ZoneId; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.MetadataValue; 
+import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.handle.service.HandleService; +import org.dspace.orcid.client.OrcidConfiguration; +import org.dspace.orcid.exception.OrcidValidationException; +import org.dspace.orcid.model.factory.OrcidCommonObjectFactory; +import org.dspace.orcid.model.validator.OrcidValidationError; +import org.dspace.util.MultiFormatDateParser; +import org.dspace.util.SimpleMapConverter; +import org.orcid.jaxb.model.common.ContributorRole; +import org.orcid.jaxb.model.common.FundingContributorRole; +import org.orcid.jaxb.model.common.Iso3166Country; +import org.orcid.jaxb.model.v3.release.common.Contributor; +import org.orcid.jaxb.model.v3.release.common.ContributorAttributes; +import org.orcid.jaxb.model.v3.release.common.Country; +import org.orcid.jaxb.model.v3.release.common.CreditName; +import org.orcid.jaxb.model.v3.release.common.DisambiguatedOrganization; +import org.orcid.jaxb.model.v3.release.common.FuzzyDate; +import org.orcid.jaxb.model.v3.release.common.Organization; +import org.orcid.jaxb.model.v3.release.common.OrganizationAddress; +import org.orcid.jaxb.model.v3.release.common.Url; +import org.orcid.jaxb.model.v3.release.record.FundingContributor; +import org.orcid.jaxb.model.v3.release.record.FundingContributorAttributes; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link OrcidCommonObjectFactory}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidCommonObjectFactoryImpl implements OrcidCommonObjectFactory { + + @Autowired + private ItemService itemService; + + @Autowired + private OrcidConfiguration orcidConfiguration; + + @Autowired + private HandleService handleService; + + private SimpleMapConverter countryConverter; + + private String organizationTitleField; + + private String organizationCityField; + + private String organizationCountryField; + + private String contributorEmailField; + + private String contributorOrcidField; + + private Map disambiguatedOrganizationIdentifierFields = new HashMap<>(); + + @Override + public Optional createFuzzyDate(MetadataValue metadataValue) { + + if (isUnprocessableValue(metadataValue)) { + return empty(); + } + + Date date = MultiFormatDateParser.parse(metadataValue.getValue()); + if (date == null) { + return empty(); + } + + LocalDate localDate = convertToLocalDate(date); + return of(FuzzyDate.valueOf(localDate.getYear(), localDate.getMonthValue(), localDate.getDayOfMonth())); + } + + @Override + public Optional createOrganization(Context context, Item orgUnit) { + + if (orgUnit == null) { + return Optional.empty(); + } + + Organization organization = new Organization(); + + organization.setName(getMetadataValue(orgUnit, organizationTitleField)); + organization.setAddress(createOrganizationAddress(orgUnit)); + organization.setDisambiguatedOrganization(createDisambiguatedOrganization(orgUnit)); + + return of(organization); + } + + @Override + public Optional createContributor(Context context, MetadataValue metadataValue, ContributorRole role) { + if (isUnprocessableValue(metadataValue)) { + return empty(); + } + + Contributor contributor = new Contributor(); + contributor.setCreditName(new CreditName(metadataValue.getValue())); + contributor.setContributorAttributes(getContributorAttributes(metadataValue, role)); + + return of(contributor); + } + + @Override + public 
Optional createFundingContributor(Context context, MetadataValue metadataValue, + FundingContributorRole role) { + + if (isUnprocessableValue(metadataValue)) { + return empty(); + } + + FundingContributor contributor = new FundingContributor(); + contributor.setCreditName(new CreditName(metadataValue.getValue())); + contributor.setContributorAttributes(getFundingContributorAttributes(metadataValue, role)); + + return of(contributor); + } + + @Override + public Optional createUrl(Context context, Item item) { + String handle = item.getHandle(); + if (StringUtils.isBlank(handle)) { + return empty(); + } + + return of(new Url(handleService.getCanonicalForm(handle))); + } + + @Override + public Optional createCountry(Context context, MetadataValue metadataValue) { + + if (isUnprocessableValue(metadataValue)) { + return empty(); + } + + Optional country = convertToIso3166Country(metadataValue.getValue()); + + if (country.isEmpty()) { + throw new OrcidValidationException(OrcidValidationError.INVALID_COUNTRY); + } + + return country.map(isoCountry -> new Country(isoCountry)); + } + + private ContributorAttributes getContributorAttributes(MetadataValue metadataValue, ContributorRole role) { + ContributorAttributes attributes = new ContributorAttributes(); + attributes.setContributorRole(role != null ? role : null); + attributes.setContributorSequence(metadataValue.getPlace() == 0 ? 
FIRST : ADDITIONAL); + return attributes; + } + + private OrganizationAddress createOrganizationAddress(Item organizationItem) { + OrganizationAddress address = new OrganizationAddress(); + + address.setCity(getMetadataValue(organizationItem, organizationCityField)); + + convertToIso3166Country(getMetadataValue(organizationItem, organizationCountryField)) + .ifPresent(address::setCountry); + + return address; + } + + private FundingContributorAttributes getFundingContributorAttributes(MetadataValue metadataValue, + FundingContributorRole role) { + FundingContributorAttributes attributes = new FundingContributorAttributes(); + attributes.setContributorRole(role != null ? role : null); + return attributes; + } + + private DisambiguatedOrganization createDisambiguatedOrganization(Item organizationItem) { + + for (String identifierField : disambiguatedOrganizationIdentifierFields.keySet()) { + + String source = disambiguatedOrganizationIdentifierFields.get(identifierField); + String identifier = getMetadataValue(organizationItem, identifierField); + + if (isNotBlank(identifier)) { + DisambiguatedOrganization disambiguatedOrganization = new DisambiguatedOrganization(); + disambiguatedOrganization.setDisambiguatedOrganizationIdentifier(identifier); + disambiguatedOrganization.setDisambiguationSource(source); + return disambiguatedOrganization; + } + + } + + return null; + } + + private Optional convertToIso3166Country(String countryValue) { + return ofNullable(countryValue) + .map(value -> countryConverter != null ? 
countryConverter.getValue(value) : value) + .filter(value -> isValidEnum(Iso3166Country.class, value)) + .map(value -> Iso3166Country.fromValue(value)); + } + + private boolean isUnprocessableValue(MetadataValue value) { + return value == null || isBlank(value.getValue()); + } + + private String getMetadataValue(Item item, String metadataField) { + if (StringUtils.isNotBlank(metadataField)) { + return itemService.getMetadataFirstValue(item, new MetadataFieldName(metadataField), Item.ANY); + } else { + return null; + } + } + + private LocalDate convertToLocalDate(Date date) { + return date.toInstant().atZone(ZoneId.systemDefault()).toLocalDate(); + } + + public String getOrganizationCityField() { + return organizationCityField; + } + + public String getOrganizationCountryField() { + return organizationCountryField; + } + + public Map getDisambiguatedOrganizationIdentifierFields() { + return disambiguatedOrganizationIdentifierFields; + } + + public String getContributorEmailField() { + return contributorEmailField; + } + + public String getContributorOrcidField() { + return contributorOrcidField; + } + + public void setItemService(ItemService itemService) { + this.itemService = itemService; + } + + public OrcidConfiguration getOrcidConfiguration() { + return orcidConfiguration; + } + + public void setOrcidConfiguration(OrcidConfiguration orcidConfiguration) { + this.orcidConfiguration = orcidConfiguration; + } + + public void setOrganizationCityField(String organizationCityField) { + this.organizationCityField = organizationCityField; + } + + public void setOrganizationCountryField(String organizationCountryField) { + this.organizationCountryField = organizationCountryField; + } + + public void setContributorEmailField(String contributorEmailField) { + this.contributorEmailField = contributorEmailField; + } + + public void setContributorOrcidField(String contributorOrcidField) { + this.contributorOrcidField = contributorOrcidField; + } + + public void 
setDisambiguatedOrganizationIdentifierFields(String disambiguatedOrganizationIds) { + this.disambiguatedOrganizationIdentifierFields = parseConfigurations(disambiguatedOrganizationIds); + } + + public SimpleMapConverter getCountryConverter() { + return countryConverter; + } + + public void setCountryConverter(SimpleMapConverter countryConverter) { + this.countryConverter = countryConverter; + } + + public String getOrganizationTitleField() { + return organizationTitleField; + } + + public void setOrganizationTitleField(String organizationTitleField) { + this.organizationTitleField = organizationTitleField; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidFundingFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidFundingFactory.java new file mode 100644 index 0000000000..890b54f12b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidFundingFactory.java @@ -0,0 +1,301 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.factory.impl; + +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.apache.commons.lang3.StringUtils.isNotBlank; + +import java.sql.SQLException; +import java.util.Collection; +import java.util.Currency; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.Relationship; +import org.dspace.content.RelationshipType; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.RelationshipService; +import org.dspace.content.service.RelationshipTypeService; +import org.dspace.core.Context; +import 
org.dspace.orcid.model.OrcidEntityType; +import org.dspace.orcid.model.OrcidFundingFieldMapping; +import org.dspace.orcid.model.factory.OrcidCommonObjectFactory; +import org.dspace.orcid.model.factory.OrcidEntityFactory; +import org.orcid.jaxb.model.common.FundingContributorRole; +import org.orcid.jaxb.model.common.FundingType; +import org.orcid.jaxb.model.v3.release.common.Amount; +import org.orcid.jaxb.model.v3.release.common.FuzzyDate; +import org.orcid.jaxb.model.v3.release.common.Organization; +import org.orcid.jaxb.model.v3.release.common.Title; +import org.orcid.jaxb.model.v3.release.common.Url; +import org.orcid.jaxb.model.v3.release.record.Activity; +import org.orcid.jaxb.model.v3.release.record.ExternalID; +import org.orcid.jaxb.model.v3.release.record.ExternalIDs; +import org.orcid.jaxb.model.v3.release.record.Funding; +import org.orcid.jaxb.model.v3.release.record.FundingContributor; +import org.orcid.jaxb.model.v3.release.record.FundingContributors; +import org.orcid.jaxb.model.v3.release.record.FundingTitle; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link OrcidEntityFactory} that creates instances of + * {@link Funding}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidFundingFactory implements OrcidEntityFactory { + + private static final Logger LOGGER = LoggerFactory.getLogger(OrcidFundingFactory.class); + + @Autowired + private ItemService itemService; + + @Autowired + private OrcidCommonObjectFactory orcidCommonObjectFactory; + + @Autowired + private RelationshipTypeService relationshipTypeService; + + @Autowired + private RelationshipService relationshipService; + + private OrcidFundingFieldMapping fieldMapping; + + @Override + public OrcidEntityType getEntityType() { + return OrcidEntityType.FUNDING; + } + + @Override + public Activity createOrcidObject(Context context, Item item) { + Funding funding = new Funding(); + funding.setContributors(getContributors(context, item)); + funding.setDescription(getDescription(context, item)); + funding.setEndDate(getEndDate(context, item)); + funding.setExternalIdentifiers(getExternalIds(context, item)); + funding.setOrganization(getOrganization(context, item)); + funding.setStartDate(getStartDate(context, item)); + funding.setTitle(getTitle(context, item)); + funding.setType(getType(context, item)); + funding.setUrl(getUrl(context, item)); + funding.setAmount(getAmount(context, item)); + return funding; + } + + private FundingContributors getContributors(Context context, Item item) { + FundingContributors fundingContributors = new FundingContributors(); + getMetadataValues(context, item, fieldMapping.getContributorFields().keySet()).stream() + .map(metadataValue -> getFundingContributor(context, metadataValue)) + .filter(Optional::isPresent) + .map(Optional::get) + .forEach(fundingContributors.getContributor()::add); + return fundingContributors; + } + + private Optional getFundingContributor(Context context, MetadataValue metadataValue) { + String metadataField = metadataValue.getMetadataField().toString('.'); + FundingContributorRole role = 
fieldMapping.getContributorFields().get(metadataField); + return orcidCommonObjectFactory.createFundingContributor(context, metadataValue, role); + } + + + private String getDescription(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getDescriptionField()) + .map(MetadataValue::getValue) + .orElse(null); + } + + private FuzzyDate getEndDate(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getEndDateField()) + .flatMap(metadataValue -> orcidCommonObjectFactory.createFuzzyDate(metadataValue)) + .orElse(null); + } + + private ExternalIDs getExternalIds(Context context, Item item) { + ExternalIDs externalIdentifiers = new ExternalIDs(); + + getMetadataValues(context, item, fieldMapping.getExternalIdentifierFields().keySet()).stream() + .map(this::getExternalId) + .forEach(externalIdentifiers.getExternalIdentifier()::add); + + return externalIdentifiers; + } + + private ExternalID getExternalId(MetadataValue metadataValue) { + String metadataField = metadataValue.getMetadataField().toString('.'); + return getExternalId(fieldMapping.getExternalIdentifierFields().get(metadataField), metadataValue.getValue()); + } + + private ExternalID getExternalId(String type, String value) { + ExternalID externalID = new ExternalID(); + externalID.setType(type); + externalID.setValue(value); + externalID.setRelationship(org.orcid.jaxb.model.common.Relationship.SELF); + return externalID; + } + + /** + * Returns an Organization ORCID entity related to the given item. The + * relationship type configured with + * orcid.mapping.funding.organization-relationship-type is the relationship used + * to search the Organization of the given project item. 
+ */ + private Organization getOrganization(Context context, Item item) { + + try { + + return relationshipTypeService.findByLeftwardOrRightwardTypeName(context, + fieldMapping.getOrganizationRelationshipType()).stream() + .flatMap(relationshipType -> getRelationships(context, item, relationshipType)) + .map(relationship -> getRelatedItem(item, relationship)) + .flatMap(orgUnit -> orcidCommonObjectFactory.createOrganization(context, orgUnit).stream()) + .findFirst() + .orElse(null); + + } catch (SQLException e) { + throw new RuntimeException(e); + } + + } + + private Stream getRelationships(Context context, Item item, RelationshipType relationshipType) { + try { + return relationshipService.findByItemAndRelationshipType(context, item, relationshipType).stream(); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + private Item getRelatedItem(Item item, Relationship relationship) { + return item.equals(relationship.getLeftItem()) ? relationship.getRightItem() : relationship.getLeftItem(); + } + + private FuzzyDate getStartDate(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getStartDateField()) + .flatMap(metadataValue -> orcidCommonObjectFactory.createFuzzyDate(metadataValue)) + .orElse(null); + } + + private FundingTitle getTitle(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getTitleField()) + .map(metadataValue -> getFundingTitle(context, metadataValue)) + .orElse(null); + } + + private FundingTitle getFundingTitle(Context context, MetadataValue metadataValue) { + FundingTitle fundingTitle = new FundingTitle(); + fundingTitle.setTitle(new Title(metadataValue.getValue())); + return fundingTitle; + } + + /** + * Returns an instance of FundingType taking the type from the given item. The + * metadata field to be used to retrieve the item's type is related to the + * configured typeField (orcid.mapping.funding.type). 
+ */ + private FundingType getType(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getTypeField()) + .map(type -> fieldMapping.convertType(type.getValue())) + .flatMap(this::getFundingType) + .orElse(FundingType.CONTRACT); + } + + private Optional getFundingType(String type) { + try { + return Optional.ofNullable(FundingType.fromValue(type)); + } catch (IllegalArgumentException ex) { + LOGGER.warn("The type {} is not valid for ORCID fundings", type); + return Optional.empty(); + } + } + + private Url getUrl(Context context, Item item) { + return orcidCommonObjectFactory.createUrl(context, item).orElse(null); + } + + /** + * Returns an Amount instance taking the amount and currency value from the + * configured metadata values of the given item, if any. + */ + private Amount getAmount(Context context, Item item) { + + Optional amount = getAmountValue(context, item); + Optional currency = getCurrencyValue(context, item); + + if (amount.isEmpty() || currency.isEmpty()) { + return null; + } + + return getAmount(amount.get(), currency.get()); + } + + /** + * Returns the amount value of the configured metadata field + * orcid.mapping.funding.amount + */ + private Optional getAmountValue(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getAmountField()) + .map(MetadataValue::getValue); + } + + /** + * Returns the amount value of the configured metadata field + * orcid.mapping.funding.amount.currency (if configured using the converter + * orcid.mapping.funding.amount.currency.converter). 
+ */ + private Optional getCurrencyValue(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getAmountCurrencyField()) + .map(currency -> fieldMapping.convertAmountCurrency(currency.getValue())) + .filter(currency -> isValidCurrency(currency)); + } + + private boolean isValidCurrency(String currency) { + try { + return currency != null && Currency.getInstance(currency) != null; + } catch (IllegalArgumentException ex) { + return false; + } + } + + private Amount getAmount(String amount, String currency) { + Amount amountObj = new Amount(); + amountObj.setContent(amount); + amountObj.setCurrencyCode(currency); + return amountObj; + } + + private List getMetadataValues(Context context, Item item, Collection metadataFields) { + return metadataFields.stream() + .flatMap(metadataField -> itemService.getMetadataByMetadataString(item, metadataField).stream()) + .collect(Collectors.toList()); + } + + private Optional getMetadataValue(Context context, Item item, String metadataField) { + if (isBlank(metadataField)) { + return Optional.empty(); + } + return itemService.getMetadataByMetadataString(item, metadataField).stream().findFirst() + .filter(metadataValue -> isNotBlank(metadataValue.getValue())); + } + + public OrcidFundingFieldMapping getFieldMapping() { + return fieldMapping; + } + + public void setFieldMapping(OrcidFundingFieldMapping fieldMapping) { + this.fieldMapping = fieldMapping; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidPersonExternalIdentifierFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidPersonExternalIdentifierFactory.java new file mode 100644 index 0000000000..077bb195a6 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidPersonExternalIdentifierFactory.java @@ -0,0 +1,75 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the 
source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.factory.impl; + +import static org.dspace.orcid.model.OrcidProfileSectionType.EXTERNAL_IDS; +import static org.dspace.orcid.model.factory.OrcidFactoryUtils.parseConfigurations; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import org.dspace.content.MetadataValue; +import org.dspace.core.Context; +import org.dspace.orcid.model.OrcidProfileSectionType; +import org.dspace.profile.OrcidProfileSyncPreference; +import org.orcid.jaxb.model.common.Relationship; +import org.orcid.jaxb.model.v3.release.common.Url; +import org.orcid.jaxb.model.v3.release.record.PersonExternalIdentifier; + +/** + * Implementation of {@link OrcidProfileSectionFactory} that model an personal + * external id. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidPersonExternalIdentifierFactory extends OrcidSimpleValueObjectFactory { + + private Map externalIds = new HashMap<>(); + + public OrcidPersonExternalIdentifierFactory(OrcidProfileSectionType sectionType, + OrcidProfileSyncPreference preference) { + super(sectionType, preference); + } + + @Override + public List getSupportedTypes() { + return List.of(EXTERNAL_IDS); + } + + @Override + protected Object create(Context context, MetadataValue metadataValue) { + + String currentMetadataField = metadataValue.getMetadataField().toString('.'); + String externalIdType = externalIds.get(currentMetadataField); + + if (externalIdType == null) { + throw new IllegalArgumentException("Metadata field not supported: " + currentMetadataField); + } + + PersonExternalIdentifier externalId = new PersonExternalIdentifier(); + externalId.setValue(metadataValue.getValue()); + externalId.setType(externalIdType); + externalId.setRelationship(Relationship.SELF); + externalId.setUrl(new Url(metadataValue.getValue())); + + return externalId; + } 
+ + public Map getExternalIds() { + return externalIds; + } + + public void setExternalIds(String externalIds) { + this.externalIds = parseConfigurations(externalIds); + setMetadataFields(this.externalIds.keySet().stream().collect(Collectors.joining(","))); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidSimpleValueObjectFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidSimpleValueObjectFactory.java new file mode 100644 index 0000000000..4ddfbe47a3 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidSimpleValueObjectFactory.java @@ -0,0 +1,149 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.factory.impl; + +import static java.util.Arrays.asList; +import static java.util.Collections.emptyList; +import static org.dspace.orcid.model.OrcidProfileSectionType.COUNTRY; +import static org.dspace.orcid.model.OrcidProfileSectionType.KEYWORDS; +import static org.dspace.orcid.model.OrcidProfileSectionType.OTHER_NAMES; +import static org.dspace.orcid.model.OrcidProfileSectionType.RESEARCHER_URLS; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +import org.apache.commons.collections.CollectionUtils; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.core.Context; +import org.dspace.orcid.model.OrcidProfileSectionType; +import org.dspace.profile.OrcidProfileSyncPreference; +import org.orcid.jaxb.model.v3.release.common.Country; +import org.orcid.jaxb.model.v3.release.common.Url; +import org.orcid.jaxb.model.v3.release.record.Address; +import org.orcid.jaxb.model.v3.release.record.Keyword; +import org.orcid.jaxb.model.v3.release.record.OtherName; +import 
org.orcid.jaxb.model.v3.release.record.ResearcherUrl; + +/** + * Implementation of {@link OrcidProfileSectionFactory} that creates ORCID + * objects with a single value. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidSimpleValueObjectFactory extends AbstractOrcidProfileSectionFactory { + + private List metadataFields = new ArrayList(); + + public OrcidSimpleValueObjectFactory(OrcidProfileSectionType sectionType, OrcidProfileSyncPreference preference) { + super(sectionType, preference); + } + + @Override + public List getSupportedTypes() { + return List.of(COUNTRY, KEYWORDS, OTHER_NAMES, RESEARCHER_URLS); + } + + @Override + public Object create(Context context, List metadataValues) { + + if (CollectionUtils.isEmpty(metadataValues)) { + throw new IllegalArgumentException("No metadata values provided to create ORCID object with simple value"); + } + + if (metadataValues.size() > 1) { + throw new IllegalArgumentException("Multiple metadata values not supported: " + metadataValues); + } + + MetadataValue metadataValue = metadataValues.get(0); + String currentMetadataField = metadataValue.getMetadataField().toString('.'); + + if (!metadataFields.contains(currentMetadataField)) { + throw new IllegalArgumentException("Metadata field not supported: " + currentMetadataField); + } + + return create(context, metadataValue); + } + + @Override + public List getMetadataSignatures(Context context, Item item) { + return metadataFields.stream() + .flatMap(metadataField -> getMetadataValues(item, metadataField).stream()) + .map(metadataValue -> metadataSignatureGenerator.generate(context, List.of(metadataValue))) + .collect(Collectors.toList()); + } + + @Override + public String getDescription(Context context, Item item, String signature) { + List metadataValues = metadataSignatureGenerator.findBySignature(context, item, signature); + return CollectionUtils.isNotEmpty(metadataValues) ? 
metadataValues.get(0).getValue() : null; + } + + /** + * Create an instance of ORCID profile section based on the configured profile + * section type, taking the value from the given metadataValue. + */ + protected Object create(Context context, MetadataValue metadataValue) { + switch (getProfileSectionType()) { + case COUNTRY: + return createAddress(context, metadataValue); + case KEYWORDS: + return createKeyword(metadataValue); + case OTHER_NAMES: + return createOtherName(metadataValue); + case RESEARCHER_URLS: + return createResearcherUrl(metadataValue); + default: + throw new IllegalStateException("OrcidSimpleValueObjectFactory does not support type " + + getProfileSectionType()); + } + } + + private ResearcherUrl createResearcherUrl(MetadataValue metadataValue) { + ResearcherUrl researcherUrl = new ResearcherUrl(); + researcherUrl.setUrl(new Url(metadataValue.getValue())); + return researcherUrl; + } + + private OtherName createOtherName(MetadataValue metadataValue) { + OtherName otherName = new OtherName(); + otherName.setContent(metadataValue.getValue()); + return otherName; + } + + private Keyword createKeyword(MetadataValue metadataValue) { + Keyword keyword = new Keyword(); + keyword.setContent(metadataValue.getValue()); + return keyword; + } + + private Address createAddress(Context context, MetadataValue metadataValue) { + return orcidCommonObjectFactory.createCountry(context, metadataValue) + .map(this::createAddress) + .orElseThrow(() -> new IllegalArgumentException("No address creatable " + + "from value " + metadataValue.getValue())); + } + + private Address createAddress(Country country) { + Address address = new Address(); + address.setCountry(country); + return address; + } + + public void setMetadataFields(String metadataFields) { + this.metadataFields = metadataFields != null ? 
asList(metadataFields.split(",")) : emptyList(); + } + + @Override + public List getMetadataFields() { + return metadataFields; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidWorkFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidWorkFactory.java new file mode 100644 index 0000000000..53b46d8256 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidWorkFactory.java @@ -0,0 +1,283 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.factory.impl; + +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.orcid.jaxb.model.common.Relationship.SELF; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.apache.commons.lang3.EnumUtils; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.orcid.model.OrcidEntityType; +import org.dspace.orcid.model.OrcidWorkFieldMapping; +import org.dspace.orcid.model.factory.OrcidCommonObjectFactory; +import org.dspace.orcid.model.factory.OrcidEntityFactory; +import org.orcid.jaxb.model.common.ContributorRole; +import org.orcid.jaxb.model.common.LanguageCode; +import org.orcid.jaxb.model.common.Relationship; +import org.orcid.jaxb.model.common.WorkType; +import org.orcid.jaxb.model.v3.release.common.Contributor; +import org.orcid.jaxb.model.v3.release.common.PublicationDate; +import org.orcid.jaxb.model.v3.release.common.Subtitle; +import org.orcid.jaxb.model.v3.release.common.Title; +import 
org.orcid.jaxb.model.v3.release.common.Url;
import org.orcid.jaxb.model.v3.release.record.Activity;
import org.orcid.jaxb.model.v3.release.record.ExternalID;
import org.orcid.jaxb.model.v3.release.record.ExternalIDs;
import org.orcid.jaxb.model.v3.release.record.Work;
import org.orcid.jaxb.model.v3.release.record.WorkContributors;
import org.orcid.jaxb.model.v3.release.record.WorkTitle;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Implementation of {@link OrcidEntityFactory} that creates instances of
 * {@link Work}.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class OrcidWorkFactory implements OrcidEntityFactory {

    private static final Logger LOGGER = LoggerFactory.getLogger(OrcidWorkFactory.class);

    @Autowired
    private ItemService itemService;

    @Autowired
    private OrcidCommonObjectFactory orcidCommonObjectFactory;

    // Mapping between item metadata fields and the ORCID work fields.
    private OrcidWorkFieldMapping fieldMapping;

    @Override
    public OrcidEntityType getEntityType() {
        return OrcidEntityType.PUBLICATION;
    }

    @Override
    public Activity createOrcidObject(Context context, Item item) {
        Work work = new Work();
        work.setJournalTitle(getJournalTitle(context, item));
        work.setWorkContributors(getWorkContributors(context, item));
        work.setWorkTitle(getWorkTitle(context, item));
        work.setPublicationDate(getPublicationDate(context, item));
        work.setWorkExternalIdentifiers(getWorkExternalIds(context, item));
        work.setWorkType(getWorkType(context, item));
        work.setShortDescription(getShortDescription(context, item));
        work.setLanguageCode(getLanguageCode(context, item));
        work.setUrl(getUrl(context, item));
        return work;
    }

    /**
     * Take the journal title from the configured metadata field of the given item,
     * if any.
     */
    private Title getJournalTitle(Context context, Item item) {
        return getMetadataValue(context, item, fieldMapping.getJournalTitleField())
            .map(metadataValue -> new Title(metadataValue.getValue()))
            .orElse(null);
    }

    /**
     * Create the work contributors from the metadata values of the configured
     * contributor fields, skipping values for which no contributor can be built.
     */
    private WorkContributors getWorkContributors(Context context, Item item) {
        Map<String, ContributorRole> contributorFields = fieldMapping.getContributorFields();
        List<Contributor> contributors = getMetadataValues(context, item, contributorFields.keySet()).stream()
            .map(metadataValue -> getContributor(context, metadataValue))
            .filter(Optional::isPresent)
            .map(Optional::get)
            .collect(Collectors.toList());
        return new WorkContributors(contributors);
    }

    /**
     * Create a contributor from the given metadata value, with the role configured
     * for the value's metadata field.
     */
    private Optional<Contributor> getContributor(Context context, MetadataValue metadataValue) {
        Map<String, ContributorRole> contributorFields = fieldMapping.getContributorFields();
        ContributorRole role = contributorFields.get(metadataValue.getMetadataField().toString('.'));
        return orcidCommonObjectFactory.createContributor(context, metadataValue, role);
    }

    /**
     * Create an instance of WorkTitle from the given item.
     */
    private WorkTitle getWorkTitle(Context context, Item item) {
        Optional<String> workTitleValue = getWorkTitleValue(context, item);
        if (workTitleValue.isEmpty()) {
            return null;
        }

        WorkTitle workTitle = new WorkTitle();
        workTitle.setTitle(new Title(workTitleValue.get()));
        getSubTitle(context, item).ifPresent(workTitle::setSubtitle);
        return workTitle;
    }

    /**
     * Take the work title from the configured metadata field of the given item
     * (orcid.mapping.work.title), if any.
     */
    private Optional<String> getWorkTitleValue(Context context, Item item) {
        return getMetadataValue(context, item, fieldMapping.getTitleField())
            .map(MetadataValue::getValue);
    }

    /**
     * Take the work sub-title from the configured metadata field of the given item
     * (orcid.mapping.work.sub-title), if any.
     */
    private Optional<Subtitle> getSubTitle(Context context, Item item) {
        return getMetadataValue(context, item, fieldMapping.getSubTitleField())
            .map(MetadataValue::getValue)
            .map(Subtitle::new);
    }

    /**
     * Take the publication date from the configured metadata field of the given
     * item, converting it to an ORCID fuzzy date, if any.
     */
    private PublicationDate getPublicationDate(Context context, Item item) {
        return getMetadataValue(context, item, fieldMapping.getPublicationDateField())
            .flatMap(orcidCommonObjectFactory::createFuzzyDate)
            .map(PublicationDate::new)
            .orElse(null);
    }

    /**
     * Creates an instance of ExternalIDs from the metadata values of the given
     * item, using the orcid.mapping.work.external-ids configuration.
     */
    private ExternalIDs getWorkExternalIds(Context context, Item item) {
        ExternalIDs externalIdentifiers = new ExternalIDs();
        externalIdentifiers.getExternalIdentifier().addAll(getWorkSelfExternalIds(context, item));
        return externalIdentifiers;
    }

    /**
     * Creates a list of ExternalID, one for each orcid.mapping.work.external-ids
     * value, taking the values from the given item.
     */
    private List<ExternalID> getWorkSelfExternalIds(Context context, Item item) {

        List<ExternalID> selfExternalIds = new ArrayList<>();

        Map<String, String> externalIdentifierFields = fieldMapping.getExternalIdentifierFields();

        // The handle placeholder maps the item's handle (not a metadata field) to an
        // external id of the configured type.
        if (externalIdentifierFields.containsKey(SIMPLE_HANDLE_PLACEHOLDER)) {
            String handleType = externalIdentifierFields.get(SIMPLE_HANDLE_PLACEHOLDER);
            selfExternalIds.add(getExternalId(handleType, item.getHandle(), SELF));
        }

        getMetadataValues(context, item, externalIdentifierFields.keySet()).stream()
            .map(this::getSelfExternalId)
            .forEach(selfExternalIds::add);

        return selfExternalIds;
    }

    /**
     * Creates an instance of ExternalID taking the value from the given
     * metadataValue. The type of the ExternalID is calculated using the
     * orcid.mapping.work.external-ids configuration. The relationship of the
     * ExternalID is SELF.
     */
    private ExternalID getSelfExternalId(MetadataValue metadataValue) {
        Map<String, String> externalIdentifierFields = fieldMapping.getExternalIdentifierFields();
        String metadataField = metadataValue.getMetadataField().toString('.');
        return getExternalId(externalIdentifierFields.get(metadataField), metadataValue.getValue(), SELF);
    }

    /**
     * Creates an instance of ExternalID with the given type, value and
     * relationship.
     */
    private ExternalID getExternalId(String type, String value, Relationship relationship) {
        ExternalID externalID = new ExternalID();
        externalID.setType(type);
        externalID.setValue(value);
        externalID.setRelationship(relationship);
        return externalID;
    }

    /**
     * Creates an instance of WorkType from the given item, taking the value from
     * the configured metadata field (orcid.mapping.work.type). Falls back to
     * UNDEFINED if no valid type is found.
     */
    private WorkType getWorkType(Context context, Item item) {
        return getMetadataValue(context, item, fieldMapping.getTypeField())
            .map(MetadataValue::getValue)
            .map(type -> fieldMapping.convertType(type))
            .flatMap(this::getWorkType)
            .orElse(WorkType.UNDEFINED);
    }

    /**
     * Creates an instance of WorkType from the given workType value, if valid.
     */
    private Optional<WorkType> getWorkType(String workType) {
        try {
            return Optional.ofNullable(WorkType.fromValue(workType));
        } catch (IllegalArgumentException ex) {
            LOGGER.warn("The type {} is not valid for ORCID works", workType);
            return Optional.empty();
        }
    }

    /**
     * Take the short description from the configured metadata field of the given
     * item, if any.
     */
    private String getShortDescription(Context context, Item item) {
        return getMetadataValue(context, item, fieldMapping.getShortDescriptionField())
            .map(MetadataValue::getValue)
            .orElse(null);
    }

    /**
     * Take the language from the configured metadata field of the given item and
     * convert it to an ORCID language code, discarding invalid codes.
     */
    private String getLanguageCode(Context context, Item item) {
        return getMetadataValue(context, item, fieldMapping.getLanguageField())
            .map(MetadataValue::getValue)
            .map(language -> fieldMapping.convertLanguage(language))
            .filter(language -> isValidLanguage(language))
            .orElse(null);
    }

    /**
     * Returns true if the given language is a valid ORCID LanguageCode, logging a
     * warning otherwise.
     */
    private boolean isValidLanguage(String language) {

        if (isBlank(language)) {
            return false;
        }

        boolean isValid = EnumUtils.isValidEnum(LanguageCode.class, language);
        if (!isValid) {
            LOGGER.warn("The language {} is not a valid language code for ORCID works", language);
        }
        return isValid;
    }

    /**
     * Build the url of the given item, if any.
     */
    private Url getUrl(Context context, Item item) {
        return orcidCommonObjectFactory.createUrl(context, item).orElse(null);
    }

    /**
     * Collect all the metadata values of the given item for the given fields.
     */
    private List<MetadataValue> getMetadataValues(Context context, Item item, Collection<String> metadataFields) {
        return metadataFields.stream()
            .flatMap(metadataField -> itemService.getMetadataByMetadataString(item, metadataField).stream())
            .collect(Collectors.toList());
    }

    /**
     * Returns the first non-blank metadata value of the given item for the given
     * field, if any. A blank field configuration yields an empty Optional.
     */
    private Optional<MetadataValue> getMetadataValue(Context context, Item item, String metadataField) {

        if (isBlank(metadataField)) {
            return Optional.empty();
        }

        return itemService.getMetadataByMetadataString(item, metadataField).stream()
            .filter(metadataValue -> isNotBlank(metadataValue.getValue()))
            .findFirst();
    }

    public void setFieldMapping(OrcidWorkFieldMapping fieldMapping) {
        this.fieldMapping = fieldMapping;
    }

}
a/dspace-api/src/main/java/org/dspace/orcid/model/validator/OrcidValidationError.java b/dspace-api/src/main/java/org/dspace/orcid/model/validator/OrcidValidationError.java new file mode 100644 index 0000000000..36f92cf1c5 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/validator/OrcidValidationError.java @@ -0,0 +1,49 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.validator; + +/** + * Enum that model all the errors that could occurs during an ORCID object + * validation. These codes are used by the {@link OrcidValidator} to returns the + * validation error related to a specific ORCID entity. The values of this enum + * are returned from the OrcidHistoryRestRepository and can be used to show an + * error message to the users when they tries to synchronize some data with + * ORCID. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public enum OrcidValidationError { + + AMOUNT_CURRENCY_REQUIRED("amount-currency.required"), + EXTERNAL_ID_REQUIRED("external-id.required"), + TITLE_REQUIRED("title.required"), + TYPE_REQUIRED("type.required"), + FUNDER_REQUIRED("funder.required"), + INVALID_COUNTRY("country.invalid"), + ORGANIZATION_NAME_REQUIRED("organization.name-required"), + PUBLICATION_DATE_INVALID("publication.date-invalid"), + ORGANIZATION_ADDRESS_REQUIRED("organization.address-required"), + ORGANIZATION_CITY_REQUIRED("organization.city-required"), + ORGANIZATION_COUNTRY_REQUIRED("organization.country-required"), + DISAMBIGUATED_ORGANIZATION_REQUIRED("disambiguated-organization.required"), + DISAMBIGUATED_ORGANIZATION_VALUE_REQUIRED("disambiguated-organization.value-required"), + DISAMBIGUATION_SOURCE_REQUIRED("disambiguation-source.required"), + DISAMBIGUATION_SOURCE_INVALID("disambiguation-source.invalid"); + + private final String code; + + private OrcidValidationError(String code) { + this.code = code; + } + + public String getCode() { + return code; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/validator/OrcidValidator.java b/dspace-api/src/main/java/org/dspace/orcid/model/validator/OrcidValidator.java new file mode 100644 index 0000000000..7b30717e2d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/validator/OrcidValidator.java @@ -0,0 +1,46 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.validator; + +import java.util.List; + +import org.orcid.jaxb.model.v3.release.record.Funding; +import org.orcid.jaxb.model.v3.release.record.Work; + +/** + * Interface for classes that validate the ORCID entity objects. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidValidator { + + /** + * Validate the given orcid object and returns the validation errors, if any. + * + * @param object the ORCID object to validate + * @return the validation errors, if any + */ + List validate(Object object); + + /** + * Validate the given work and returns the validation errors, if any. + * + * @param work the work to validate + * @return the validation errors, if any + */ + List validateWork(Work work); + + /** + * Validate the given funding and returns the validation errors, if any. + * + * @param funding the funding to validate + * @return the validation errors, if any + */ + List validateFunding(Funding funding); +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/validator/impl/OrcidValidatorImpl.java b/dspace-api/src/main/java/org/dspace/orcid/model/validator/impl/OrcidValidatorImpl.java new file mode 100644 index 0000000000..a599695c07 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/validator/impl/OrcidValidatorImpl.java @@ -0,0 +1,235 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.validator.impl; + +import static org.apache.commons.collections.CollectionUtils.isEmpty; +import static org.apache.commons.lang3.ArrayUtils.contains; +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.dspace.orcid.model.validator.OrcidValidationError.AMOUNT_CURRENCY_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.DISAMBIGUATED_ORGANIZATION_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.DISAMBIGUATED_ORGANIZATION_VALUE_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.DISAMBIGUATION_SOURCE_INVALID; +import 
static org.dspace.orcid.model.validator.OrcidValidationError.DISAMBIGUATION_SOURCE_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.EXTERNAL_ID_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.FUNDER_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.ORGANIZATION_ADDRESS_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.ORGANIZATION_CITY_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.ORGANIZATION_COUNTRY_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.ORGANIZATION_NAME_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.PUBLICATION_DATE_INVALID; +import static org.dspace.orcid.model.validator.OrcidValidationError.TITLE_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.TYPE_REQUIRED; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import org.dspace.orcid.model.validator.OrcidValidationError; +import org.dspace.orcid.model.validator.OrcidValidator; +import org.dspace.services.ConfigurationService; +import org.orcid.jaxb.model.v3.release.common.DisambiguatedOrganization; +import org.orcid.jaxb.model.v3.release.common.Organization; +import org.orcid.jaxb.model.v3.release.common.OrganizationAddress; +import org.orcid.jaxb.model.v3.release.common.PublicationDate; +import org.orcid.jaxb.model.v3.release.common.Year; +import org.orcid.jaxb.model.v3.release.record.ExternalIDs; +import org.orcid.jaxb.model.v3.release.record.Funding; +import org.orcid.jaxb.model.v3.release.record.FundingTitle; +import org.orcid.jaxb.model.v3.release.record.Work; +import org.orcid.jaxb.model.v3.release.record.WorkTitle; + +/** + * Implementation of {@link OrcidValidator}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidValidatorImpl implements OrcidValidator { + + private final ConfigurationService configurationService; + + public OrcidValidatorImpl(ConfigurationService configurationService) { + this.configurationService = configurationService; + } + + @Override + public List validate(Object object) { + + if (object instanceof Work && isWorkValidationEnabled()) { + return validateWork((Work) object); + } + + if (object instanceof Funding && isFundingValidationEnabled()) { + return validateFunding((Funding) object); + } + + return Collections.emptyList(); + } + + /** + * A work is valid if has title, type, a valid publication date and at least one + * external id. + */ + @Override + public List validateWork(Work work) { + List errors = new ArrayList(); + + WorkTitle title = work.getWorkTitle(); + if (title == null || title.getTitle() == null || isBlank(title.getTitle().getContent())) { + errors.add(TITLE_REQUIRED); + } + + if (work.getWorkType() == null) { + errors.add(TYPE_REQUIRED); + } + + ExternalIDs externalIdentifiers = work.getExternalIdentifiers(); + + if (externalIdentifiers == null || isEmpty(externalIdentifiers.getExternalIdentifier())) { + errors.add(EXTERNAL_ID_REQUIRED); + } + + PublicationDate publicationDate = work.getPublicationDate(); + if (publicationDate != null && isYearNotValid(publicationDate)) { + errors.add(PUBLICATION_DATE_INVALID); + } + + return errors; + } + + /** + * A funding is valid if has title, a valid funder organization and at least one + * external id. If it has an amount, the amount currency is required. 
+ */ + @Override + public List validateFunding(Funding funding) { + + List errors = new ArrayList(); + + FundingTitle title = funding.getTitle(); + if (title == null || title.getTitle() == null || isBlank(title.getTitle().getContent())) { + errors.add(TITLE_REQUIRED); + } + + ExternalIDs externalIdentifiers = funding.getExternalIdentifiers(); + + if (externalIdentifiers == null || isEmpty(externalIdentifiers.getExternalIdentifier())) { + errors.add(EXTERNAL_ID_REQUIRED); + } + + if (funding.getOrganization() == null) { + errors.add(FUNDER_REQUIRED); + } else { + errors.addAll(validate(funding.getOrganization())); + } + + if (funding.getAmount() != null && isBlank(funding.getAmount().getCurrencyCode())) { + errors.add(AMOUNT_CURRENCY_REQUIRED); + } + + return errors; + } + + /** + * The organization is valid if it has a name, a valid address and a valid + * disambiguated-organization complex type. + */ + private List validate(Organization organization) { + List errors = new ArrayList(); + if (isBlank(organization.getName())) { + errors.add(ORGANIZATION_NAME_REQUIRED); + } + + errors.addAll(validate(organization.getAddress())); + errors.addAll(validate(organization.getDisambiguatedOrganization())); + + return errors; + } + + /** + * A disambiguated-organization type is valid if it has an identifier and a + * valid source (the valid values for sources are configured with + * orcid.validation.organization.identifier-sources) + */ + private List validate(DisambiguatedOrganization disambiguatedOrganization) { + + List errors = new ArrayList(); + + + if (disambiguatedOrganization == null) { + errors.add(DISAMBIGUATED_ORGANIZATION_REQUIRED); + return errors; + } + + if (isBlank(disambiguatedOrganization.getDisambiguatedOrganizationIdentifier())) { + errors.add(DISAMBIGUATED_ORGANIZATION_VALUE_REQUIRED); + } + + String disambiguationSource = disambiguatedOrganization.getDisambiguationSource(); + + if (isBlank(disambiguationSource)) { + 
errors.add(DISAMBIGUATION_SOURCE_REQUIRED); + } else if (isInvalidDisambiguationSource(disambiguationSource)) { + errors.add(DISAMBIGUATION_SOURCE_INVALID); + } + + return errors; + } + + /** + * An organization address is valid if it has a city and a country. + */ + private List validate(OrganizationAddress address) { + List errors = new ArrayList(); + + if (address == null) { + errors.add(ORGANIZATION_ADDRESS_REQUIRED); + return errors; + } + + if (isBlank(address.getCity())) { + errors.add(ORGANIZATION_CITY_REQUIRED); + } + + if (address.getCountry() == null) { + errors.add(ORGANIZATION_COUNTRY_REQUIRED); + } + + return errors; + } + + private boolean isYearNotValid(PublicationDate publicationDate) { + Year year = publicationDate.getYear(); + if (year == null) { + return true; + } + + try { + return Integer.valueOf(year.getValue()) < 1900; + } catch (NumberFormatException ex) { + return true; + } + } + + private boolean isInvalidDisambiguationSource(String disambiguationSource) { + return !contains(getDisambiguedOrganizationSources(), disambiguationSource); + } + + private String[] getDisambiguedOrganizationSources() { + return configurationService.getArrayProperty("orcid.validation.organization.identifier-sources"); + } + + private boolean isWorkValidationEnabled() { + return configurationService.getBooleanProperty("orcid.validation.work.enabled", true); + } + + private boolean isFundingValidationEnabled() { + return configurationService.getBooleanProperty("orcid.validation.funding.enabled", true); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPush.java b/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPush.java new file mode 100644 index 0000000000..0e6f856bfc --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPush.java @@ -0,0 +1,331 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree 
and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.script; + +import static org.apache.commons.lang3.StringUtils.isNotEmpty; +import static org.dspace.profile.OrcidSynchronizationMode.BATCH; +import static org.dspace.profile.OrcidSynchronizationMode.MANUAL; + +import java.sql.SQLException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.stream.Collectors; + +import org.apache.commons.cli.ParseException; +import org.apache.commons.lang3.exception.ExceptionUtils; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.orcid.OrcidHistory; +import org.dspace.orcid.OrcidQueue; +import org.dspace.orcid.exception.OrcidValidationException; +import org.dspace.orcid.factory.OrcidServiceFactory; +import org.dspace.orcid.service.OrcidHistoryService; +import org.dspace.orcid.service.OrcidQueueService; +import org.dspace.orcid.service.OrcidSynchronizationService; +import org.dspace.profile.OrcidSynchronizationMode; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.utils.DSpace; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Script that perform the bulk synchronization with ORCID registry of all the + * ORCID queue records that has an profileItem that configure the + * synchronization mode equals to BATCH. 
+ * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidBulkPush extends DSpaceRunnable> { + + private static final Logger LOGGER = LoggerFactory.getLogger(OrcidBulkPush.class); + + private OrcidQueueService orcidQueueService; + + private OrcidHistoryService orcidHistoryService; + + private OrcidSynchronizationService orcidSynchronizationService; + + private ConfigurationService configurationService; + + private Context context; + + /** + * Cache that stores the synchronization mode set for a specific profile item. + */ + private Map synchronizationModeByProfileItem = new HashMap<>(); + + private boolean ignoreMaxAttempts = false; + + @Override + public void setup() throws ParseException { + OrcidServiceFactory orcidServiceFactory = OrcidServiceFactory.getInstance(); + this.orcidQueueService = orcidServiceFactory.getOrcidQueueService(); + this.orcidHistoryService = orcidServiceFactory.getOrcidHistoryService(); + this.orcidSynchronizationService = orcidServiceFactory.getOrcidSynchronizationService(); + this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + + if (commandLine.hasOption('f')) { + ignoreMaxAttempts = true; + } + + } + + @Override + public void internalRun() throws Exception { + + if (isOrcidSynchronizationDisabled()) { + handler.logWarning("The ORCID synchronization is disabled. The script cannot proceed"); + return; + } + + context = new Context(); + assignCurrentUserInContext(); + + try { + context.turnOffAuthorisationSystem(); + performBulkSynchronization(); + context.complete(); + } catch (Exception e) { + handler.handleException(e); + context.abort(); + } finally { + context.restoreAuthSystemState(); + } + } + + /** + * Find all the Orcid Queue records that need to be synchronized and perfom the + * synchronization. 
+ */ + private void performBulkSynchronization() throws SQLException { + + List queueRecords = findQueueRecordsToSynchronize(); + handler.logInfo("Found " + queueRecords.size() + " queue records to synchronize with ORCID"); + + for (OrcidQueue queueRecord : queueRecords) { + performSynchronization(queueRecord); + } + + } + + /** + * Returns all the stored Orcid Queue records (ignoring or not the max attempts) + * related to a profile that has the synchronization mode set to BATCH. + */ + private List findQueueRecordsToSynchronize() throws SQLException { + return findQueueRecords().stream() + .filter(record -> getProfileItemSynchronizationMode(record.getProfileItem()) == BATCH) + .collect(Collectors.toList()); + } + + /** + * If the current script execution is configued to ignore the max attemps, + * returns all the ORCID Queue records, otherwise returns the ORCID Queue + * records that has an attempts value less than the configured max attempts + * value. + */ + private List findQueueRecords() throws SQLException { + if (ignoreMaxAttempts) { + return orcidQueueService.findAll(context); + } else { + int attempts = configurationService.getIntProperty("orcid.bulk-synchronization.max-attempts"); + return orcidQueueService.findByAttemptsLessThan(context, attempts); + } + } + + /** + * Try to synchronize the given queue record with ORCID, handling any errors. 
+ */ + private void performSynchronization(OrcidQueue queueRecord) { + + try { + + queueRecord = reload(queueRecord); + + handler.logInfo(getOperationInfoMessage(queueRecord)); + + OrcidHistory orcidHistory = orcidHistoryService.synchronizeWithOrcid(context, queueRecord, false); + + handler.logInfo(getSynchronizationResultMessage(orcidHistory)); + + commitTransaction(); + + } catch (OrcidValidationException ex) { + rollbackTransaction(); + handler.logError(getValidationErrorMessage(ex)); + } catch (Exception ex) { + rollbackTransaction(); + String errorMessage = getUnexpectedErrorMessage(ex); + LOGGER.error(errorMessage, ex); + handler.logError(errorMessage); + } finally { + incrementAttempts(queueRecord); + } + + } + + /** + * Returns the Synchronization mode related to the given profile item. + */ + private OrcidSynchronizationMode getProfileItemSynchronizationMode(Item profileItem) { + OrcidSynchronizationMode synchronizationMode = synchronizationModeByProfileItem.get(profileItem); + if (synchronizationMode == null) { + synchronizationMode = orcidSynchronizationService.getSynchronizationMode(profileItem).orElse(MANUAL); + synchronizationModeByProfileItem.put(profileItem, synchronizationMode); + } + return synchronizationMode; + } + + /** + * Returns an info log message with the details of the given record's operation. + * This message is logged before ORCID synchronization. 
+ */ + private String getOperationInfoMessage(OrcidQueue record) { + + UUID profileItemId = record.getProfileItem().getID(); + String putCode = record.getPutCode(); + String type = record.getRecordType(); + + if (record.getOperation() == null) { + return "Synchronization of " + type + " data for profile with ID: " + profileItemId; + } + + switch (record.getOperation()) { + case INSERT: + return "Addition of " + type + " for profile with ID: " + profileItemId; + case UPDATE: + return "Update of " + type + " for profile with ID: " + profileItemId + " by put code " + putCode; + case DELETE: + return "Deletion of " + type + " for profile with ID: " + profileItemId + " by put code " + putCode; + default: + return "Synchronization of " + type + " data for profile with ID: " + profileItemId; + } + + } + + /** + * Returns an info log message with the details of the synchronization result. + * This message is logged after ORCID synchronization. + */ + private String getSynchronizationResultMessage(OrcidHistory orcidHistory) { + + String message = "History record created with status " + orcidHistory.getStatus(); + + switch (orcidHistory.getStatus()) { + case 201: + case 200: + case 204: + message += ". The operation was completed successfully"; + break; + case 400: + message += ". The resource sent to ORCID registry is not valid"; + break; + case 404: + message += ". The resource does not exists anymore on the ORCID registry"; + break; + case 409: + message += ". The resource is already present on the ORCID registry"; + break; + case 500: + message += ". An internal server error on ORCID registry side occurs"; + break; + default: + message += ". 
Details: " + orcidHistory.getResponseMessage(); + break; + } + + return message; + + } + + private String getValidationErrorMessage(OrcidValidationException ex) { + return ex.getMessage(); + } + + private String getUnexpectedErrorMessage(Exception ex) { + return "An unexpected error occurs during the synchronization: " + getRootMessage(ex); + } + + private void incrementAttempts(OrcidQueue queueRecord) { + queueRecord = reload(queueRecord); + if (queueRecord == null) { + return; + } + + try { + queueRecord.setAttempts(queueRecord.getAttempts() != null ? queueRecord.getAttempts() + 1 : 1); + orcidQueueService.update(context, queueRecord); + commitTransaction(); + } catch (SQLException e) { + throw new RuntimeException(e); + } + + } + + /** + * This method will assign the currentUser to the {@link Context}. The instance + * of the method in this class will fetch the EPersonIdentifier from this class, + * this identifier was given to this class upon instantiation, it'll then be + * used to find the {@link EPerson} associated with it and this {@link EPerson} + * will be set as the currentUser of the created {@link Context} + */ + private void assignCurrentUserInContext() throws SQLException { + UUID uuid = getEpersonIdentifier(); + if (uuid != null) { + EPerson ePerson = EPersonServiceFactory.getInstance().getEPersonService().find(context, uuid); + context.setCurrentUser(ePerson); + } + } + + private OrcidQueue reload(OrcidQueue queueRecord) { + try { + return context.reloadEntity(queueRecord); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + private void commitTransaction() { + try { + context.commit(); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + private void rollbackTransaction() { + try { + context.rollback(); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + private String getRootMessage(Exception ex) { + String message = ExceptionUtils.getRootCauseMessage(ex); + return 
isNotEmpty(message) ? message.substring(message.indexOf(":") + 1).trim() : "Generic error"; + } + + private boolean isOrcidSynchronizationDisabled() { + return !configurationService.getBooleanProperty("orcid.synchronization-enabled", true); + } + + @Override + @SuppressWarnings("unchecked") + public OrcidBulkPushScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager().getServiceByName("orcid-bulk-push", + OrcidBulkPushScriptConfiguration.class); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java new file mode 100644 index 0000000000..1a657343c0 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java @@ -0,0 +1,65 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.script; + +import java.sql.SQLException; + +import org.apache.commons.cli.Options; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.core.Context; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Script configuration for {@link OrcidBulkPush}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + * @param the OrcidBulkPush type + */ +public class OrcidBulkPushScriptConfiguration extends ScriptConfiguration { + + @Autowired + private AuthorizeService authorizeService; + + private Class dspaceRunnableClass; + + @Override + public boolean isAllowedToExecute(Context context) { + try { + return authorizeService.isAdmin(context); + } catch (SQLException e) { + throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + } + } + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public Options getOptions() { + if (options == null) { + Options options = new Options(); + + options.addOption("f", "force", false, "force the synchronization ignoring maximum attempts"); + options.getOption("f").setType(boolean.class); + options.getOption("f").setRequired(false); + + super.options = options; + } + return options; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/MetadataSignatureGenerator.java b/dspace-api/src/main/java/org/dspace/orcid/service/MetadataSignatureGenerator.java new file mode 100644 index 0000000000..28a270faa7 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/MetadataSignatureGenerator.java @@ -0,0 +1,48 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service; + +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.core.Context; + +/** + * Interface that mark classes that can be used to generate a signature for + * metadata values. 
The signature must be a unique identification of a metadata, + * based on the attributes that compose it (such as field, value and authority). + * It is possible to generate a signature for a single metadata value and also + * for a list of values. Given an item, a signature can for example be used to + * check if the associated metadata is present in the item. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface MetadataSignatureGenerator { + + /** + * Generate a signature related to the given metadata values. + * + * @param context the DSpace context + * @param metadataValues the metadata values to sign + * @return the generated signature + */ + public String generate(Context context, List metadataValues); + + /** + * Returns the metadata values traceable by the given item related with the + * given signature. + * + * @param context the DSpace context + * @param item the item + * @param signature the metadata signature + * @return the found metadata + */ + public List findBySignature(Context context, Item item, String signature); +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/OrcidEntityFactoryService.java b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidEntityFactoryService.java new file mode 100644 index 0000000000..78f2c1331d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidEntityFactoryService.java @@ -0,0 +1,32 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.orcid.jaxb.model.v3.release.record.Activity; + +/** + * Interface that marks classes that handle the configured instance of + * {@link OrcidEntityFactory}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidEntityFactoryService { + + /** + * Builds an ORCID Activity object starting from the given item. The actual type + * of Activity constructed depends on the entity type of the input item. + * + * @param context the DSpace context + * @param item the item + * @return the created object + */ + Activity createOrcidObject(Context context, Item item); +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/OrcidHistoryService.java b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidHistoryService.java new file mode 100644 index 0000000000..13e1a52b6f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidHistoryService.java @@ -0,0 +1,152 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service; + +import java.sql.SQLException; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.orcid.OrcidHistory; +import org.dspace.orcid.OrcidQueue; +import org.dspace.orcid.exception.OrcidValidationException; + +/** + * Interface of service to manage OrcidHistory. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it) + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + */ +public interface OrcidHistoryService { + + /** + * Get an OrcidHistory from the database. + * + * @param context DSpace context object + * @param id ID of the OrcidHistory + * @return the OrcidHistory format, or null if the ID is invalid. + * @throws SQLException if database error + */ + public OrcidHistory find(Context context, int id) throws SQLException; + + /** + * Find all the ORCID history records. 
+ * + * @param context DSpace context object + * @return the ORCID history records + * @throws SQLException if an SQL error occurs + */ + public List findAll(Context context) throws SQLException; + + /** + * Get the OrcidHistory records where the given item is the profile item OR the + * entity + * + * @param context DSpace context object + * @param item the item to search for + * @return the found OrcidHistory entities + * @throws SQLException if database error + */ + public List findByProfileItemOrEntity(Context context, Item item) throws SQLException; + + /** + * Find the OrcidHistory records related to the given entity item. + * + * @param context DSpace context object + * @param entity the entity item + * @return the found put codes + * @throws SQLException if database error + */ + public List findByEntity(Context context, Item entity) throws SQLException; + + /** + * Create a new OrcidHistory records related to the given profileItem and entity + * items. + * + * @param context DSpace context object + * @param profileItem the profileItem item + * @param entity the entity item + * @return the created orcid history record + * @throws SQLException if database error + */ + public OrcidHistory create(Context context, Item profileItem, Item entity) throws SQLException; + + /** + * Delete an OrcidHistory + * + * @param context context + * @param orcidHistory the OrcidHistory entity to delete + * @throws SQLException if database error + */ + public void delete(Context context, OrcidHistory orcidHistory) throws SQLException; + + /** + * Update the OrcidHistory + * + * @param context context + * @param orcidHistory the OrcidHistory entity to update + * @throws SQLException if database error + */ + public void update(Context context, OrcidHistory orcidHistory) throws SQLException; + + /** + * Find the last put code related to the given profileItem and entity item. 
+ * + * @param context DSpace context object + * @param profileItem the profileItem item + * @param entity the entity item + * @return the found put code, if any + * @throws SQLException if database error + */ + public Optional findLastPutCode(Context context, Item profileItem, Item entity) throws SQLException; + + /** + * Find all the last put code related to the entity item each associated with + * the profileItem to which it refers. + * + * @param context DSpace context object + * @param entity the entity item + * @return a map that relates the profileItems with the identified + * putCode + * @throws SQLException if database error + */ + public Map findLastPutCodes(Context context, Item entity) throws SQLException; + + /** + * Find all the successfully Orcid history records with the given record type + * related to the given entity. An history record is considered successful if + * the status is between 200 and 300. + * + * @param context DSpace context object + * @param entity the entity item + * @param recordType the record type + * @return the found orcid history records + * @throws SQLException if database error + */ + List findSuccessfullyRecordsByEntityAndType(Context context, Item entity, String recordType) + throws SQLException; + + /** + * Synchronize the entity related to the given orcidQueue record with ORCID. 
+ * + * @param context DSpace context object + * @param orcidQueue the orcid queue record that has the + * references of the data to be synchronized + * @param forceAddition to force the insert on the ORCID registry + * @return the created orcid history record with the + * synchronization result + * @throws SQLException if database error + * @throws OrcidValidationException if the data to synchronize with ORCID is not + * valid + */ + public OrcidHistory synchronizeWithOrcid(Context context, OrcidQueue orcidQueue, boolean forceAddition) + throws SQLException, OrcidValidationException; + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/OrcidProfileSectionFactoryService.java b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidProfileSectionFactoryService.java new file mode 100644 index 0000000000..603d33ddf5 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidProfileSectionFactoryService.java @@ -0,0 +1,55 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service; + +import java.util.List; +import java.util.Optional; + +import org.dspace.content.MetadataValue; +import org.dspace.core.Context; +import org.dspace.orcid.model.OrcidProfileSectionType; +import org.dspace.orcid.model.factory.OrcidProfileSectionFactory; +import org.dspace.profile.OrcidProfileSyncPreference; + +/** + * Interface that mark classes that handle the configured instance of + * {@link OrcidProfileSectionFactory}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidProfileSectionFactoryService { + + /** + * Returns the profile section factory of the given type. 
+ * + * @param type the type of the section configurations to retrieve + * @return the section configurations of the given type + */ + Optional findBySectionType(OrcidProfileSectionType type); + + /** + * Returns all the profile section configurations relative to the given + * preferences. + * + * @param preferences the preferences to search for + * @return the section configurations + */ + List findByPreferences(List preferences); + + /** + * Builds an ORCID object starting from the given metadata values compliance to + * the given profile section type. + * + * @param context the DSpace context + * @param metadataValues the metadata values + * @param type the profile section type + * @return the created object + */ + Object createOrcidObject(Context context, List metadataValues, OrcidProfileSectionType type); +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/OrcidQueueService.java b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidQueueService.java new file mode 100644 index 0000000000..8de25e9caf --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidQueueService.java @@ -0,0 +1,260 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service; + +import java.sql.SQLException; +import java.util.List; +import java.util.UUID; + +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.orcid.OrcidQueue; +import org.dspace.orcid.model.OrcidEntityType; +import org.dspace.profile.OrcidEntitySyncPreference; + +/** + * Service that handles ORCID queue records. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidQueueService { + + /** + * Create an OrcidQueue record with the given profileItem and entity. 
The type + * of operation is calculated based on whether or not the given entity was + * already pushed to the ORCID registry. + * + * @param context DSpace context object + * @param profileItem the profileItem item + * @param entity the entity item + * @return the stored record + * @throws SQLException if an SQL error occurs + */ + public OrcidQueue create(Context context, Item profileItem, Item entity) throws SQLException; + + /** + * Create an OrcidQueue record with the given profileItem and entity to push new + * data to ORCID. + * + * @param context DSpace context object + * @param profileItem the profileItem item + * @param entity the entity item + * @return the stored record + * @throws SQLException if an SQL error occurs + */ + public OrcidQueue createEntityInsertionRecord(Context context, Item profileItem, Item entity) throws SQLException; + + /** + * Create an OrcidQueue record with the given profileItem to update a record on + * ORCID with the given putCode. + * + * @param context DSpace context object + * @param profileItem the profileItem item + * @param entity the entity item + * @param putCode the putCode related to the given entity item + * @return the stored record + * @throws SQLException if an SQL error occurs + */ + public OrcidQueue createEntityUpdateRecord(Context context, Item profileItem, Item entity, String putCode) + throws SQLException; + + /** + * Create an OrcidQueue record with the given profileItem to delete a record on + * ORCID related to the given entity type with the given putCode. 
+ * + * @param context DSpace context object + * @param profileItem the profileItem item + * @param description the orcid queue record description + * @param type the type of the entity item + * @param putCode the putCode related to the given entity item + * @return the stored record + * @throws SQLException if an SQL error occurs + */ + OrcidQueue createEntityDeletionRecord(Context context, Item profileItem, String description, String type, + String putCode) + throws SQLException; + + /** + * Create an OrcidQueue record with the profile to add data to ORCID. + * + * @param context DSpace context object + * @param profile the profile item + * @param description the record description + * @param recordType the record type + * @param metadata the metadata signature + * @return the stored record + * @throws SQLException if an SQL error occurs + */ + OrcidQueue createProfileInsertionRecord(Context context, Item profile, String description, String recordType, + String metadata) throws SQLException; + + /** + * Create an OrcidQueue record with the profile to remove data from ORCID. + * + * @param context DSpace context object + * @param profile the profile item + * @param description the record description + * @param recordType the record type + * @param putCode the putCode + * @return the stored record + * @throws SQLException if an SQL error occurs + */ + OrcidQueue createProfileDeletionRecord(Context context, Item profile, String description, String recordType, + String metadata, String putCode) throws SQLException; + + /** + * Find all the ORCID queue records. + * + * @param context DSpace context object + * @return the ORCID queue records + * @throws SQLException if an SQL error occurs + */ + public List findAll(Context context) throws SQLException; + + /** + * Get the orcid queue records by the profileItem id. 
+ * + * @param context DSpace context object + * @param profileItemId the profileItem item id + * @return the orcid queue records + * @throws SQLException if an SQL error occurs + */ + public List findByProfileItemId(Context context, UUID profileItemId) throws SQLException; + + /** + * Get the orcid queue records by the profileItem id. + * + * @param context DSpace context object + * @param profileItemId the profileItem item id + * @param limit limit + * @param offset offset + * @return the orcid queue records + * @throws SQLException if an SQL error occurs + */ + public List findByProfileItemId(Context context, UUID profileItemId, Integer limit, Integer offset) + throws SQLException; + + /** + * Get the orcid queue records by the profileItem and entity. + * + * @param context DSpace context object + * @param profileItem the profileItem item + * @param entity the entity item + * @return the found OrcidQueue records + * @throws SQLException if an SQL error occurs + */ + public List findByProfileItemAndEntity(Context context, Item profileItem, Item entity) + throws SQLException; + + /** + * Get the OrcidQueue records where the given item is the profileItem OR the + * entity + * + * @param context DSpace context object + * @param item the item to search for + * @return the found OrcidQueue records + * @throws SQLException if database error + */ + public List findByProfileItemOrEntity(Context context, Item item) throws SQLException; + + /** + * Get all the OrcidQueue records with attempts less than the given attempts. + * + * @param context DSpace context object + * @param attempts the maximum value of attempts + * @return the found OrcidQueue records + * @throws SQLException if database error + */ + public List findByAttemptsLessThan(Context context, int attempts) throws SQLException; + + /** + * Returns the number of records on the OrcidQueue associated with the given + * profileItemId. 
+ * + * @param context DSpace context object + * @param profileItemId the profileItem item id + * @return the record's count + * @throws SQLException if an SQL error occurs + */ + long countByProfileItemId(Context context, UUID profileItemId) throws SQLException; + + /** + * Delete the OrcidQueue record with the given id. + * + * @param context DSpace context object + * @param id the id of the record to be deleted + * @throws SQLException if an SQL error occurs + */ + public void deleteById(Context context, Integer id) throws SQLException; + + /** + * Delete an OrcidQueue + * + * @param context DSpace context object + * @param orcidQueue the orcidQueue record to delete + * @throws SQLException if database error + * @throws AuthorizeException if authorization error + */ + public void delete(Context context, OrcidQueue orcidQueue) throws SQLException; + + /** + * Delete all the OrcidQueue records with the given entity and record type. + * + * @param context DSpace context object + * @param entity the entity item + * @param recordType the record type + * @throws SQLException if database error occurs + */ + public void deleteByEntityAndRecordType(Context context, Item entity, String recordType) throws SQLException; + + /** + * Delete all the OrcidQueue records with the given profileItem and record type. + * + * @param context DSpace context object + * @param profileItem the profileItem item + * @param recordType the record type + * @throws SQLException if database error occurs + */ + public void deleteByProfileItemAndRecordType(Context context, Item profileItem, String recordType) + throws SQLException; + + /** + * Get an OrcidQueue from the database. + * + * @param context DSpace context object + * @param id ID of the OrcidQueue + * @return the OrcidQueue format, or null if the ID is invalid. 
+ * @throws SQLException if database error + */ + public OrcidQueue find(Context context, int id) throws SQLException; + + /** + * Update the OrcidQueue + * + * @param context context + * @param orcidQueue the OrcidQueue to update + * @throws SQLException if database error + */ + public void update(Context context, OrcidQueue orcidQueue) throws SQLException; + + /** + * Recalculates the ORCID queue records linked to the given profileItem as + * regards the entities of the given type. The recalculation is done based on + * the preference indicated. + * + * @param context context + * @param profileItem the profileItem + * @param entityType the entity type related to the records to recalculate + * @param preference the preference value on which to base the recalculation + * @throws SQLException if database error + */ + public void recalculateOrcidQueue(Context context, Item profileItem, OrcidEntityType entityType, + OrcidEntitySyncPreference preference) throws SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/OrcidSynchronizationService.java b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidSynchronizationService.java new file mode 100644 index 0000000000..575ce6811b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidSynchronizationService.java @@ -0,0 +1,167 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service; + +import java.sql.SQLException; +import java.util.List; +import java.util.Optional; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.orcid.model.OrcidEntityType; +import org.dspace.orcid.model.OrcidTokenResponseDTO; +import org.dspace.profile.OrcidEntitySyncPreference; +import org.dspace.profile.OrcidProfileDisconnectionMode; +import 
org.dspace.profile.OrcidProfileSyncPreference; +import org.dspace.profile.OrcidSynchronizationMode; + +/** + * Service that handles the synchronization between a DSpace profile and the + * relative ORCID profile, if any. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + */ +public interface OrcidSynchronizationService { + + /** + * Check if the given item is linked to an ORCID profile. + * + * @param context the relevant DSpace Context. + * @param item the item to check + * @return true if the given item is linked to ORCID + */ + boolean isLinkedToOrcid(Context context, Item item); + + /** + * Configure the given profile with the data present in the given ORCID token. + * This action is required to synchronize profile and related entities with + * ORCID. No security check is done, it is therefore the caller's responsibility + * to verify for example that the current user has permission to connect the + * profile to ORCID (if necessary). + * + * @param context the relevant DSpace Context. + * @param profile the profile to configure + * @param token the ORCID token + * @throws SQLException if a SQL error occurs during the profile update + */ + public void linkProfile(Context context, Item profile, OrcidTokenResponseDTO token) throws SQLException; + + /** + * Disconnect the given profile from ORCID. + * + * @param context the relevant DSpace Context. + * @param profile the profile to disconnect + * @throws SQLException if a SQL error occurs during the profile update + */ + public void unlinkProfile(Context context, Item profile) throws SQLException; + + /** + * Set the synchronization preference for the given profile related to the given + * ORCID entity type. + * + * @param context the relevant DSpace Context. 
+ * @param profile the researcher profile to update + * @param entityType the orcid entity type + * @param value the new synchronization preference value + * @return true if the value has actually been updated, + * false if the value to be set is the same as + * the one already configured + * @throws SQLException if a SQL error occurs during the profile + * update + * @throws IllegalArgumentException if the given researcher profile is not linked + * with an ORCID account + */ + public boolean setEntityPreference(Context context, Item profile, OrcidEntityType entityType, + OrcidEntitySyncPreference value) throws SQLException; + + /** + * Update the profile's synchronization preference for the given profile. + * + * @param context the relevant DSpace Context. + * @param profile the researcher profile to update + * @param values the new synchronization preference values + * @return true if the value has actually been updated, + * false if the value to be set is the same as + * the one already configured + * @throws SQLException if a SQL error occurs during the profile + * update + * @throws IllegalArgumentException if the given researcher profile is not linked + * with an ORCID account + */ + public boolean setProfilePreference(Context context, Item profile, + List values) throws SQLException; + + /** + * Set the ORCID synchronization mode for the given profile. + * + * @param context the relevant DSpace Context. + * @param profile the researcher profile to update + * @param value the new synchronization mode value + * @return true if the value has actually been updated, false if + * the value to be set is the same as the one already + * configured + * @throws SQLException if a SQL error occurs during the profile update + */ + public boolean setSynchronizationMode(Context context, Item profile, OrcidSynchronizationMode value) + throws SQLException; + + /** + * Check if the given researcher profile item is configured to synchronize the + * given item with ORCID. 
+ * + * @param profile the researcher profile item + * @param item the entity type to check + * @return true if the given entity type can be synchronize with ORCID, + * false otherwise + */ + public boolean isSynchronizationAllowed(Item profile, Item item); + + /** + * Returns the ORCID synchronization mode configured for the given profile item. + * + * @param profile the researcher profile item + * @return the synchronization mode + */ + Optional getSynchronizationMode(Item profile); + + /** + * Returns the ORCID synchronization preference related to the given entity type + * configured for the given profile item. + * + * @param profile the researcher profile item + * @param entityType the orcid entity type + * @return the configured preference + */ + Optional getEntityPreference(Item profile, OrcidEntityType entityType); + + /** + * Returns the ORCID synchronization preferences related to the profile itself + * configured for the given profile item. + * + * @param profile the researcher profile item + * @return the synchronization mode + */ + List getProfilePreferences(Item profile); + + /** + * Returns the configuration ORCID profile's disconnection mode. If that mode is + * not configured or the configuration is wrong, the value DISABLED is returned. + * + * @return the disconnection mode + */ + OrcidProfileDisconnectionMode getDisconnectionMode(); + + /** + * Returns all the profiles with the given orcid id. + * + * @param context the relevant DSpace Context. 
+ * @param orcid the orcid id to search for + * @return the found profile items + */ + List findProfilesByOrcid(Context context, String orcid); +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/OrcidTokenService.java b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidTokenService.java new file mode 100644 index 0000000000..ead9682971 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidTokenService.java @@ -0,0 +1,92 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.orcid.OrcidToken; + +/** + * Service that handle {@link OrcidToken} entities. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidTokenService { + + /** + * Creates a new OrcidToken entity for the given ePerson and accessToken. + * + * @param context the DSpace context + * @param ePerson the EPerson + * @param accessToken the access token + * @return the created entity instance + */ + public OrcidToken create(Context context, EPerson ePerson, String accessToken); + + /** + * Creates a new OrcidToken entity for the given ePerson and accessToken. + * + * @param context the DSpace context + * @param ePerson the EPerson + * @param profileItem the profile item + * @param accessToken the access token + * @return the created entity instance + */ + public OrcidToken create(Context context, EPerson ePerson, Item profileItem, String accessToken); + + /** + * Find an OrcidToken by ePerson. 
+ * + * @param context the DSpace context + * @param ePerson the ePerson to search for + * @return the Orcid token, if any + */ + public OrcidToken findByEPerson(Context context, EPerson ePerson); + + /** + * Find an OrcidToken by profileItem. + * + * @param context the DSpace context + * @param profileItem the profile item to search for + * @return the Orcid token, if any + */ + public OrcidToken findByProfileItem(Context context, Item profileItem); + + /** + * Delete the given ORCID token entity. + * + * @param context the DSpace context + * @param orcidToken the ORCID token entity to delete + */ + public void delete(Context context, OrcidToken orcidToken); + + /** + * Delete all the ORCID token entities. + * + * @param context the DSpace context + */ + public void deleteAll(Context context); + + /** + * Deletes the ORCID token entity related to the given EPerson. + * + * @param context the DSpace context + * @param ePerson the ePerson for the deletion + */ + public void deleteByEPerson(Context context, EPerson ePerson); + + /** + * Deletes the ORCID token entity related to the given profile item. 
+ * + * @param context the DSpace context + * @param profileItem the item for the deletion + */ + public void deleteByProfileItem(Context context, Item profileItem); +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidEntityFactoryServiceImpl.java b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidEntityFactoryServiceImpl.java new file mode 100644 index 0000000000..c02185b430 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidEntityFactoryServiceImpl.java @@ -0,0 +1,62 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service.impl; + +import static java.util.stream.Collectors.toMap; + +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.function.Function; + +import org.dspace.content.Item; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.orcid.model.OrcidEntityType; +import org.dspace.orcid.model.factory.OrcidEntityFactory; +import org.dspace.orcid.service.OrcidEntityFactoryService; +import org.orcid.jaxb.model.v3.release.record.Activity; + +/** + * Implementation of {@link OrcidEntityFactoryService}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidEntityFactoryServiceImpl implements OrcidEntityFactoryService { + + /** + * Message of the exception thrown if the given item is not a valid entity for + * ORCID (defined with the entityFactories map). 
+ */ + private final String INVALID_ENTITY_MSG = "The item with id %s is not a configured Orcid entity"; + + private final Map entityFactories; + + private final ItemService itemService; + + private OrcidEntityFactoryServiceImpl(List entityFactories, ItemService itemService) { + this.itemService = itemService; + this.entityFactories = entityFactories.stream() + .collect(toMap(OrcidEntityFactory::getEntityType, Function.identity())); + } + + @Override + public Activity createOrcidObject(Context context, Item item) { + OrcidEntityFactory factory = getOrcidEntityType(item) + .map(entityType -> entityFactories.get(entityType)) + .orElseThrow(() -> new IllegalArgumentException(String.format(INVALID_ENTITY_MSG, item.getID()))); + + return factory.createOrcidObject(context, item); + } + + private Optional getOrcidEntityType(Item item) { + return Optional.ofNullable(OrcidEntityType.fromEntityType(itemService.getEntityTypeLabel(item))); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidHistoryServiceImpl.java b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidHistoryServiceImpl.java new file mode 100644 index 0000000000..0bec9a12e0 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidHistoryServiceImpl.java @@ -0,0 +1,360 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service.impl; + +import static java.lang.String.format; +import static java.util.Comparator.comparing; +import static java.util.Comparator.naturalOrder; +import static java.util.Comparator.nullsFirst; +import static java.util.Optional.ofNullable; +import static org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.apache.commons.lang3.math.NumberUtils.isCreatable; + +import java.sql.SQLException; +import java.util.HashMap; 
+import java.util.List; +import java.util.Map; +import java.util.Optional; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.HttpStatus; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.orcid.OrcidHistory; +import org.dspace.orcid.OrcidOperation; +import org.dspace.orcid.OrcidQueue; +import org.dspace.orcid.client.OrcidClient; +import org.dspace.orcid.client.OrcidResponse; +import org.dspace.orcid.dao.OrcidHistoryDAO; +import org.dspace.orcid.dao.OrcidQueueDAO; +import org.dspace.orcid.exception.OrcidClientException; +import org.dspace.orcid.exception.OrcidValidationException; +import org.dspace.orcid.model.OrcidEntityType; +import org.dspace.orcid.model.OrcidProfileSectionType; +import org.dspace.orcid.model.validator.OrcidValidationError; +import org.dspace.orcid.model.validator.OrcidValidator; +import org.dspace.orcid.service.MetadataSignatureGenerator; +import org.dspace.orcid.service.OrcidEntityFactoryService; +import org.dspace.orcid.service.OrcidHistoryService; +import org.dspace.orcid.service.OrcidProfileSectionFactoryService; +import org.dspace.orcid.service.OrcidTokenService; +import org.orcid.jaxb.model.v3.release.record.Activity; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link OrcidHistoryService}. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it) + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidHistoryServiceImpl implements OrcidHistoryService { + + private static final Logger LOGGER = LoggerFactory.getLogger(OrcidHistoryServiceImpl.class); + + @Autowired + private OrcidHistoryDAO orcidHistoryDAO; + + @Autowired + private OrcidQueueDAO orcidQueueDAO; + + @Autowired + private ItemService itemService; + + @Autowired + private OrcidProfileSectionFactoryService profileFactoryService; + + @Autowired + private OrcidEntityFactoryService activityFactoryService; + + @Autowired + private MetadataSignatureGenerator metadataSignatureGenerator; + + @Autowired + private OrcidClient orcidClient; + + @Autowired + private OrcidValidator orcidValidator; + + @Autowired + private OrcidTokenService orcidTokenService; + + @Override + public OrcidHistory find(Context context, int id) throws SQLException { + return orcidHistoryDAO.findByID(context, OrcidHistory.class, id); + } + + @Override + public List findAll(Context context) throws SQLException { + return orcidHistoryDAO.findAll(context, OrcidHistory.class); + } + + @Override + public List findByProfileItemOrEntity(Context context, Item profileItem) throws SQLException { + return orcidHistoryDAO.findByProfileItemOrEntity(context, profileItem); + } + + @Override + public OrcidHistory create(Context context, Item profileItem, Item entity) throws SQLException { + OrcidHistory orcidHistory = new OrcidHistory(); + orcidHistory.setEntity(entity); + orcidHistory.setProfileItem(profileItem); + return orcidHistoryDAO.create(context, orcidHistory); + } + + @Override + public void delete(Context context, OrcidHistory orcidHistory) throws SQLException { + orcidHistoryDAO.delete(context, orcidHistory); + } + + @Override + public void update(Context context, OrcidHistory orcidHistory) throws SQLException { + if (orcidHistory != null) { + orcidHistoryDAO.save(context, orcidHistory); + 
} + } + + @Override + public Optional findLastPutCode(Context context, Item profileItem, Item entity) throws SQLException { + List records = orcidHistoryDAO.findByProfileItemAndEntity(context, profileItem.getID(), + entity.getID()); + return findLastPutCode(records, profileItem); + } + + @Override + public Map findLastPutCodes(Context context, Item entity) throws SQLException { + Map profileItemAndPutCodeMap = new HashMap(); + + List orcidHistoryRecords = findByEntity(context, entity); + for (OrcidHistory orcidHistoryRecord : orcidHistoryRecords) { + Item profileItem = orcidHistoryRecord.getProfileItem(); + if (profileItemAndPutCodeMap.containsKey(profileItem)) { + continue; + } + + findLastPutCode(orcidHistoryRecords, profileItem) + .ifPresent(putCode -> profileItemAndPutCodeMap.put(profileItem, putCode)); + } + + return profileItemAndPutCodeMap; + } + + @Override + public List findByEntity(Context context, Item entity) throws SQLException { + return orcidHistoryDAO.findByEntity(context, entity); + } + + @Override + public List findSuccessfullyRecordsByEntityAndType(Context context, + Item entity, String recordType) throws SQLException { + return orcidHistoryDAO.findSuccessfullyRecordsByEntityAndType(context, entity, recordType); + } + + @Override + public OrcidHistory synchronizeWithOrcid(Context context, OrcidQueue orcidQueue, boolean forceAddition) + throws SQLException { + + Item profileItem = orcidQueue.getProfileItem(); + + String orcid = getMetadataValue(profileItem, "person.identifier.orcid") + .orElseThrow(() -> new IllegalArgumentException( + format("The related profileItem item (id = %s) does not have an orcid", profileItem.getID()))); + + String token = getAccessToken(context, profileItem) + .orElseThrow(() -> new IllegalArgumentException( + format("The related profileItem item (id = %s) does not have an access token", profileItem.getID()))); + + OrcidOperation operation = calculateOperation(orcidQueue, forceAddition); + + try { + + OrcidResponse 
response = synchronizeWithOrcid(context, orcidQueue, orcid, token, operation); + OrcidHistory orcidHistory = createHistoryRecordFromOrcidResponse(context, orcidQueue, operation, response); + orcidQueueDAO.delete(context, orcidQueue); + return orcidHistory; + + } catch (OrcidValidationException ex) { + throw ex; + } catch (OrcidClientException ex) { + LOGGER.error("An error occurs during the orcid synchronization of ORCID queue " + orcidQueue, ex); + return createHistoryRecordFromOrcidError(context, orcidQueue, operation, ex); + } catch (RuntimeException ex) { + LOGGER.warn("An unexpected error occurs during the orcid synchronization of ORCID queue " + orcidQueue, ex); + return createHistoryRecordFromGenericError(context, orcidQueue, operation, ex); + } + + } + + private OrcidResponse synchronizeWithOrcid(Context context, OrcidQueue orcidQueue, String orcid, String token, + OrcidOperation operation) throws SQLException { + if (isProfileSectionType(orcidQueue)) { + return synchronizeProfileDataWithOrcid(context, orcidQueue, orcid, token, operation); + } else if (isEntityType(orcidQueue)) { + return synchronizeEntityWithOrcid(context, orcidQueue, orcid, token, operation); + } else { + throw new IllegalArgumentException("The type of the given queue record could not be determined"); + } + } + + private OrcidOperation calculateOperation(OrcidQueue orcidQueue, boolean forceAddition) { + OrcidOperation operation = orcidQueue.getOperation(); + if (operation == null) { + throw new IllegalArgumentException("The orcid queue record with id " + orcidQueue.getID() + + " has no operation defined"); + } + return operation != OrcidOperation.DELETE && forceAddition ? 
OrcidOperation.INSERT : operation; + } + + private OrcidResponse synchronizeEntityWithOrcid(Context context, OrcidQueue orcidQueue, + String orcid, String token, OrcidOperation operation) throws SQLException { + if (operation == OrcidOperation.DELETE) { + return deleteEntityOnOrcid(context, orcid, token, orcidQueue); + } else { + return sendEntityToOrcid(context, orcid, token, orcidQueue, operation == OrcidOperation.UPDATE); + } + } + + private OrcidResponse synchronizeProfileDataWithOrcid(Context context, OrcidQueue orcidQueue, + String orcid, String token, OrcidOperation operation) throws SQLException { + + if (operation == OrcidOperation.INSERT) { + return sendProfileDataToOrcid(context, orcid, token, orcidQueue); + } else { + return deleteProfileDataOnOrcid(context, orcid, token, orcidQueue); + } + + } + + private OrcidResponse sendEntityToOrcid(Context context, String orcid, String token, OrcidQueue orcidQueue, + boolean toUpdate) { + + Activity activity = activityFactoryService.createOrcidObject(context, orcidQueue.getEntity()); + + List validationErrors = orcidValidator.validate(activity); + if (CollectionUtils.isNotEmpty(validationErrors)) { + throw new OrcidValidationException(validationErrors); + } + + if (toUpdate) { + activity.setPutCode(getPutCode(orcidQueue)); + return orcidClient.update(token, orcid, activity, orcidQueue.getPutCode()); + } else { + return orcidClient.push(token, orcid, activity); + } + + } + + private OrcidResponse sendProfileDataToOrcid(Context context, String orcid, String token, OrcidQueue orcidQueue) { + + OrcidProfileSectionType recordType = OrcidProfileSectionType.fromString(orcidQueue.getRecordType()); + String signature = orcidQueue.getMetadata(); + Item person = orcidQueue.getEntity(); + + List metadataValues = metadataSignatureGenerator.findBySignature(context, person, signature); + Object orcidObject = profileFactoryService.createOrcidObject(context, metadataValues, recordType); + + List validationErrors = 
orcidValidator.validate(orcidObject); + if (CollectionUtils.isNotEmpty(validationErrors)) { + throw new OrcidValidationException(validationErrors); + } + + return orcidClient.push(token, orcid, orcidObject); + } + + private OrcidResponse deleteProfileDataOnOrcid(Context context, String orcid, String token, OrcidQueue orcidQueue) { + OrcidProfileSectionType recordType = OrcidProfileSectionType.fromString(orcidQueue.getRecordType()); + return orcidClient.deleteByPutCode(token, orcid, orcidQueue.getPutCode(), recordType.getPath()); + } + + private OrcidResponse deleteEntityOnOrcid(Context context, String orcid, String token, OrcidQueue orcidQueue) { + OrcidEntityType recordType = OrcidEntityType.fromEntityType(orcidQueue.getRecordType()); + return orcidClient.deleteByPutCode(token, orcid, orcidQueue.getPutCode(), recordType.getPath()); + } + + private OrcidHistory createHistoryRecordFromGenericError(Context context, OrcidQueue orcidQueue, + OrcidOperation operation, RuntimeException ex) throws SQLException { + return create(context, orcidQueue, ex.getMessage(), operation, 500, null); + } + + private OrcidHistory createHistoryRecordFromOrcidError(Context context, OrcidQueue orcidQueue, + OrcidOperation operation, OrcidClientException ex) throws SQLException { + return create(context, orcidQueue, ex.getMessage(), operation, ex.getStatus(), null); + } + + private OrcidHistory createHistoryRecordFromOrcidResponse(Context context, OrcidQueue orcidQueue, + OrcidOperation operation, OrcidResponse orcidResponse) throws SQLException { + + int status = orcidResponse.getStatus(); + if (operation == OrcidOperation.DELETE && orcidResponse.isNotFoundStatus()) { + status = HttpStatus.SC_NO_CONTENT; + } + + return create(context, orcidQueue, orcidResponse.getContent(), operation, status, orcidResponse.getPutCode()); + } + + private OrcidHistory create(Context context, OrcidQueue orcidQueue, String responseMessage, + OrcidOperation operation, int status, String putCode) throws 
SQLException { + OrcidHistory history = new OrcidHistory(); + history.setEntity(orcidQueue.getEntity()); + history.setProfileItem(orcidQueue.getProfileItem()); + history.setResponseMessage(responseMessage); + history.setStatus(status); + history.setPutCode(putCode); + history.setRecordType(orcidQueue.getRecordType()); + history.setMetadata(orcidQueue.getMetadata()); + history.setOperation(operation); + history.setDescription(orcidQueue.getDescription()); + return orcidHistoryDAO.create(context, history); + } + + private Optional getMetadataValue(Item item, String metadataField) { + return ofNullable(itemService.getMetadataFirstValue(item, new MetadataFieldName(metadataField), Item.ANY)) + .filter(StringUtils::isNotBlank); + } + + private Optional getAccessToken(Context context, Item item) { + return ofNullable(orcidTokenService.findByProfileItem(context, item)) + .map(orcidToken -> orcidToken.getAccessToken()); + } + + private boolean isProfileSectionType(OrcidQueue orcidQueue) { + return OrcidProfileSectionType.isValid(orcidQueue.getRecordType()); + } + + private boolean isEntityType(OrcidQueue orcidQueue) { + return OrcidEntityType.isValidEntityType(orcidQueue.getRecordType()); + } + + private Optional findLastPutCode(List orcidHistoryRecords, Item profileItem) { + return orcidHistoryRecords.stream() + .filter(orcidHistoryRecord -> profileItem.equals(orcidHistoryRecord.getProfileItem())) + .sorted(comparing(OrcidHistory::getTimestamp, nullsFirst(naturalOrder())).reversed()) + .map(history -> history.getPutCode()) + .filter(putCode -> isNotBlank(putCode)) + .findFirst(); + } + + private Long getPutCode(OrcidQueue orcidQueue) { + return isCreatable(orcidQueue.getPutCode()) ? 
Long.valueOf(orcidQueue.getPutCode()) : null; + } + + public OrcidClient getOrcidClient() { + return orcidClient; + } + + public void setOrcidClient(OrcidClient orcidClient) { + this.orcidClient = orcidClient; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidProfileSectionFactoryServiceImpl.java b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidProfileSectionFactoryServiceImpl.java new file mode 100644 index 0000000000..fad5a6657d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidProfileSectionFactoryServiceImpl.java @@ -0,0 +1,61 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service.impl; + +import static java.util.stream.Collectors.toMap; + +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.function.Function; +import java.util.function.Predicate; +import java.util.stream.Collectors; + +import org.dspace.content.MetadataValue; +import org.dspace.core.Context; +import org.dspace.orcid.model.OrcidProfileSectionType; +import org.dspace.orcid.model.factory.OrcidProfileSectionFactory; +import org.dspace.orcid.service.OrcidProfileSectionFactoryService; +import org.dspace.profile.OrcidProfileSyncPreference; + +/** + * Implementation of {@link OrcidProfileSectionFactoryService}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidProfileSectionFactoryServiceImpl implements OrcidProfileSectionFactoryService { + + private final Map<OrcidProfileSectionType, OrcidProfileSectionFactory> sectionFactories; + + private OrcidProfileSectionFactoryServiceImpl(List<OrcidProfileSectionFactory> sectionFactories) { + this.sectionFactories = sectionFactories.stream() + .collect(toMap(OrcidProfileSectionFactory::getProfileSectionType, Function.identity())); + } + + @Override + public Optional<OrcidProfileSectionFactory> findBySectionType(OrcidProfileSectionType type) { + return Optional.ofNullable(this.sectionFactories.get(type)); + } + + @Override + public List<OrcidProfileSectionFactory> findByPreferences(List<OrcidProfileSyncPreference> preferences) { + return filterBy(configuration -> preferences.contains(configuration.getSynchronizationPreference())); + } + + @Override + public Object createOrcidObject(Context context, List<MetadataValue> metadataValues, OrcidProfileSectionType type) { + OrcidProfileSectionFactory profileSectionFactory = findBySectionType(type) + .orElseThrow(() -> new IllegalArgumentException("No ORCID profile section factory configured for " + type)); + return profileSectionFactory.create(context, metadataValues); + } + + private List<OrcidProfileSectionFactory> filterBy(Predicate<OrcidProfileSectionFactory> predicate) { + return sectionFactories.values().stream().filter(predicate).collect(Collectors.toList()); + } +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidQueueServiceImpl.java b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidQueueServiceImpl.java new file mode 100644 index 0000000000..d3300fea66 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidQueueServiceImpl.java @@ -0,0 +1,242 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service.impl; + +import java.sql.SQLException; +import java.util.List; +import java.util.Optional; +import
java.util.UUID; +import java.util.stream.Collectors; + +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.Relationship; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.RelationshipService; +import org.dspace.core.Context; +import org.dspace.orcid.OrcidOperation; +import org.dspace.orcid.OrcidQueue; +import org.dspace.orcid.dao.OrcidQueueDAO; +import org.dspace.orcid.model.OrcidEntityType; +import org.dspace.orcid.service.OrcidHistoryService; +import org.dspace.orcid.service.OrcidQueueService; +import org.dspace.profile.OrcidEntitySyncPreference; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link OrcidQueueService}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidQueueServiceImpl implements OrcidQueueService { + + @Autowired + private OrcidQueueDAO orcidQueueDAO; + + @Autowired + private OrcidHistoryService orcidHistoryService; + + @Autowired + private ItemService itemService; + + @Autowired + private RelationshipService relationshipService; + + @Override + public List findByProfileItemId(Context context, UUID profileItemId) throws SQLException { + return orcidQueueDAO.findByProfileItemId(context, profileItemId, -1, 0); + } + + @Override + public List findByProfileItemId(Context context, UUID profileItemId, Integer limit, Integer offset) + throws SQLException { + return orcidQueueDAO.findByProfileItemId(context, profileItemId, limit, offset); + } + + @Override + public List findByProfileItemAndEntity(Context context, Item profileItem, Item entity) + throws SQLException { + return orcidQueueDAO.findByProfileItemAndEntity(context, profileItem, entity); + } + + @Override + public List findByProfileItemOrEntity(Context context, Item item) throws SQLException { + return orcidQueueDAO.findByProfileItemOrEntity(context, item); + } + + @Override + public long countByProfileItemId(Context 
context, UUID profileItemId) throws SQLException { + return orcidQueueDAO.countByProfileItemId(context, profileItemId); + } + + @Override + public List findAll(Context context) throws SQLException { + return orcidQueueDAO.findAll(context, OrcidQueue.class); + } + + @Override + public OrcidQueue create(Context context, Item profileItem, Item entity) throws SQLException { + Optional putCode = orcidHistoryService.findLastPutCode(context, profileItem, entity); + if (putCode.isPresent()) { + return createEntityUpdateRecord(context, profileItem, entity, putCode.get()); + } else { + return createEntityInsertionRecord(context, profileItem, entity); + } + } + + @Override + public OrcidQueue createEntityInsertionRecord(Context context, Item profileItem, Item entity) throws SQLException { + OrcidQueue orcidQueue = new OrcidQueue(); + orcidQueue.setEntity(entity); + orcidQueue.setRecordType(itemService.getEntityTypeLabel(entity)); + orcidQueue.setProfileItem(profileItem); + orcidQueue.setDescription(getMetadataValue(entity, "dc.title")); + orcidQueue.setOperation(OrcidOperation.INSERT); + return orcidQueueDAO.create(context, orcidQueue); + } + + @Override + public OrcidQueue createEntityUpdateRecord(Context context, Item profileItem, Item entity, String putCode) + throws SQLException { + OrcidQueue orcidQueue = new OrcidQueue(); + orcidQueue.setProfileItem(profileItem); + orcidQueue.setEntity(entity); + orcidQueue.setPutCode(putCode); + orcidQueue.setRecordType(itemService.getEntityTypeLabel(entity)); + orcidQueue.setDescription(getMetadataValue(entity, "dc.title")); + orcidQueue.setOperation(OrcidOperation.UPDATE); + return orcidQueueDAO.create(context, orcidQueue); + } + + @Override + public OrcidQueue createEntityDeletionRecord(Context context, Item profileItem, String description, String type, + String putCode) + throws SQLException { + OrcidQueue orcidQueue = new OrcidQueue(); + orcidQueue.setRecordType(type); + orcidQueue.setProfileItem(profileItem); + 
orcidQueue.setPutCode(putCode); + orcidQueue.setDescription(description); + orcidQueue.setOperation(OrcidOperation.DELETE); + return orcidQueueDAO.create(context, orcidQueue); + } + + @Override + public OrcidQueue createProfileInsertionRecord(Context context, Item profile, String description, String recordType, + String metadata) throws SQLException { + OrcidQueue orcidQueue = new OrcidQueue(); + orcidQueue.setEntity(profile); + orcidQueue.setRecordType(recordType); + orcidQueue.setProfileItem(profile); + orcidQueue.setDescription(description); + orcidQueue.setMetadata(metadata); + orcidQueue.setOperation(OrcidOperation.INSERT); + return orcidQueueDAO.create(context, orcidQueue); + } + + @Override + public OrcidQueue createProfileDeletionRecord(Context context, Item profile, String description, String recordType, + String metadata, String putCode) throws SQLException { + OrcidQueue orcidQueue = new OrcidQueue(); + orcidQueue.setEntity(profile); + orcidQueue.setRecordType(recordType); + orcidQueue.setProfileItem(profile); + orcidQueue.setDescription(description); + orcidQueue.setPutCode(putCode); + orcidQueue.setMetadata(metadata); + orcidQueue.setOperation(OrcidOperation.DELETE); + return orcidQueueDAO.create(context, orcidQueue); + } + + @Override + public void deleteById(Context context, Integer id) throws SQLException { + OrcidQueue orcidQueue = orcidQueueDAO.findByID(context, OrcidQueue.class, id); + if (orcidQueue != null) { + delete(context, orcidQueue); + } + } + + @Override + public List findByAttemptsLessThan(Context context, int attempts) throws SQLException { + return orcidQueueDAO.findByAttemptsLessThan(context, attempts); + } + + @Override + public void delete(Context context, OrcidQueue orcidQueue) throws SQLException { + orcidQueueDAO.delete(context, orcidQueue); + } + + @Override + public void deleteByEntityAndRecordType(Context context, Item entity, String recordType) throws SQLException { + List records = 
orcidQueueDAO.findByEntityAndRecordType(context, entity, recordType); + for (OrcidQueue record : records) { + orcidQueueDAO.delete(context, record); + } + } + + @Override + public void deleteByProfileItemAndRecordType(Context context, Item profileItem, String recordType) + throws SQLException { + List records = orcidQueueDAO.findByProfileItemAndRecordType(context, profileItem, recordType); + for (OrcidQueue record : records) { + orcidQueueDAO.delete(context, record); + } + } + + @Override + public OrcidQueue find(Context context, int id) throws SQLException { + return orcidQueueDAO.findByID(context, OrcidQueue.class, id); + } + + @Override + public void update(Context context, OrcidQueue orcidQueue) throws SQLException { + orcidQueueDAO.save(context, orcidQueue); + } + + @Override + public void recalculateOrcidQueue(Context context, Item profileItem, OrcidEntityType orcidEntityType, + OrcidEntitySyncPreference preference) throws SQLException { + + String entityType = orcidEntityType.getEntityType(); + if (preference == OrcidEntitySyncPreference.DISABLED) { + deleteByProfileItemAndRecordType(context, profileItem, entityType); + } else { + List entities = findAllEntitiesLinkableWith(context, profileItem, entityType); + for (Item entity : entities) { + create(context, profileItem, entity); + } + } + + } + + private List findAllEntitiesLinkableWith(Context context, Item profile, String entityType) { + + return findRelationshipsByItem(context, profile).stream() + .map(relationship -> getRelatedItem(relationship, profile)) + .filter(item -> entityType.equals(itemService.getEntityTypeLabel(item))) + .collect(Collectors.toList()); + + } + + private List findRelationshipsByItem(Context context, Item item) { + try { + return relationshipService.findByItem(context, item); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + private Item getRelatedItem(Relationship relationship, Item item) { + return relationship.getLeftItem().equals(item) ? 
relationship.getRightItem() : relationship.getLeftItem(); + } + + private String getMetadataValue(Item item, String metadatafield) { + return itemService.getMetadataFirstValue(item, new MetadataFieldName(metadatafield), Item.ANY); + } +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidSynchronizationServiceImpl.java b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidSynchronizationServiceImpl.java new file mode 100644 index 0000000000..97d832d3de --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidSynchronizationServiceImpl.java @@ -0,0 +1,331 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service.impl; + +import static java.time.LocalDateTime.now; +import static java.time.format.DateTimeFormatter.ISO_DATE_TIME; +import static java.util.List.of; +import static java.util.Optional.ofNullable; +import static org.apache.commons.collections.CollectionUtils.isEmpty; +import static org.apache.commons.lang3.EnumUtils.isValidEnum; +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.dspace.content.Item.ANY; +import static org.dspace.profile.OrcidEntitySyncPreference.DISABLED; + +import java.sql.SQLException; +import java.util.HashSet; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.apache.commons.codec.binary.StringUtils; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import 
org.dspace.discovery.indexobject.IndexableItem; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.service.EPersonService; +import org.dspace.orcid.OrcidToken; +import org.dspace.orcid.model.OrcidEntityType; +import org.dspace.orcid.model.OrcidTokenResponseDTO; +import org.dspace.orcid.service.OrcidSynchronizationService; +import org.dspace.orcid.service.OrcidTokenService; +import org.dspace.profile.OrcidEntitySyncPreference; +import org.dspace.profile.OrcidProfileDisconnectionMode; +import org.dspace.profile.OrcidProfileSyncPreference; +import org.dspace.profile.OrcidSynchronizationMode; +import org.dspace.profile.service.ResearcherProfileService; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link OrcidSynchronizationService}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidSynchronizationServiceImpl implements OrcidSynchronizationService { + + @Autowired + private ItemService itemService; + + @Autowired + private ConfigurationService configurationService; + + @Autowired + private EPersonService ePersonService; + + @Autowired + private SearchService searchService; + + @Autowired + private OrcidTokenService orcidTokenService; + + @Autowired + private ResearcherProfileService researcherProfileService; + + @Override + public void linkProfile(Context context, Item profile, OrcidTokenResponseDTO token) throws SQLException { + + EPerson ePerson = ePersonService.findByProfileItem(context, profile); + if (ePerson == null) { + throw new IllegalArgumentException( + "The given profile item is not related to any eperson. 
Item id: " + profile.getID()); + } + + String orcid = token.getOrcid(); + String accessToken = token.getAccessToken(); + String[] scopes = token.getScopeAsArray(); + + itemService.setMetadataSingleValue(context, profile, "person", "identifier", "orcid", null, orcid); + itemService.clearMetadata(context, profile, "dspace", "orcid", "scope", Item.ANY); + for (String scope : scopes) { + itemService.addMetadata(context, profile, "dspace", "orcid", "scope", null, scope); + } + + if (isBlank(itemService.getMetadataFirstValue(profile, "dspace", "orcid", "authenticated", Item.ANY))) { + String currentDate = ISO_DATE_TIME.format(now()); + itemService.setMetadataSingleValue(context, profile, "dspace", "orcid", "authenticated", null, currentDate); + } + + setAccessToken(context, profile, ePerson, accessToken); + + EPerson ePersonByOrcid = ePersonService.findByNetid(context, orcid); + if (ePersonByOrcid == null && isBlank(ePerson.getNetid())) { + ePerson.setNetid(orcid); + updateEPerson(context, ePerson); + } + + updateItem(context, profile); + + } + + @Override + public void unlinkProfile(Context context, Item profile) throws SQLException { + + itemService.clearMetadata(context, profile, "person", "identifier", "orcid", Item.ANY); + itemService.clearMetadata(context, profile, "dspace", "orcid", "scope", Item.ANY); + itemService.clearMetadata(context, profile, "dspace", "orcid", "authenticated", Item.ANY); + + orcidTokenService.deleteByProfileItem(context, profile); + + updateItem(context, profile); + + } + + @Override + public boolean setEntityPreference(Context context, Item profile, OrcidEntityType type, + OrcidEntitySyncPreference value) throws SQLException { + String metadataQualifier = "sync-" + type.name().toLowerCase() + "s"; + return updatePreferenceForSynchronizingWithOrcid(context, profile, metadataQualifier, of(value.name())); + } + + @Override + public boolean setProfilePreference(Context context, Item profile, List values) + throws SQLException { + + List 
valuesAsString = values.stream() + .map(OrcidProfileSyncPreference::name) + .collect(Collectors.toList()); + + return updatePreferenceForSynchronizingWithOrcid(context, profile, "sync-profile", valuesAsString); + + } + + @Override + public boolean setSynchronizationMode(Context context, Item profile, OrcidSynchronizationMode value) + throws SQLException { + + if (!isLinkedToOrcid(context, profile)) { + throw new IllegalArgumentException("The given profile cannot be configured for the ORCID " + + "synchronization because it is not linked to any ORCID account: " + + profile.getID()); + } + + String newValue = value.name(); + String oldValue = itemService.getMetadataFirstValue(profile, "dspace", "orcid", "sync-mode", Item.ANY); + + if (StringUtils.equals(oldValue, newValue)) { + return false; + } else { + itemService.setMetadataSingleValue(context, profile, "dspace", "orcid", "sync-mode", null, value.name()); + return true; + } + + } + + @Override + public boolean isSynchronizationAllowed(Item profile, Item item) { + + if (isOrcidSynchronizationDisabled()) { + return false; + } + + String entityType = itemService.getEntityTypeLabel(item); + if (entityType == null) { + return false; + } + + if (OrcidEntityType.isValidEntityType(entityType)) { + return getEntityPreference(profile, OrcidEntityType.fromEntityType(entityType)) + .filter(pref -> pref != DISABLED) + .isPresent(); + } + + if (entityType.equals(researcherProfileService.getProfileType())) { + return profile.equals(item) && !isEmpty(getProfilePreferences(profile)); + } + + return false; + + } + + @Override + public Optional getSynchronizationMode(Item item) { + return getMetadataValue(item, "dspace.orcid.sync-mode") + .map(metadataValue -> metadataValue.getValue()) + .filter(value -> isValidEnum(OrcidSynchronizationMode.class, value)) + .map(value -> OrcidSynchronizationMode.valueOf(value)); + } + + @Override + public Optional getEntityPreference(Item item, OrcidEntityType entityType) { + return 
getMetadataValue(item, "dspace.orcid.sync-" + entityType.name().toLowerCase() + "s") + .map(metadataValue -> metadataValue.getValue()) + .filter(value -> isValidEnum(OrcidEntitySyncPreference.class, value)) + .map(value -> OrcidEntitySyncPreference.valueOf(value)); + } + + @Override + public List getProfilePreferences(Item item) { + return getMetadataValues(item, "dspace.orcid.sync-profile") + .map(MetadataValue::getValue) + .filter(value -> isValidEnum(OrcidProfileSyncPreference.class, value)) + .map(value -> OrcidProfileSyncPreference.valueOf(value)) + .collect(Collectors.toList()); + } + + @Override + public boolean isLinkedToOrcid(Context context, Item item) { + return getOrcidAccessToken(context, item).isPresent() && getOrcid(item).isPresent(); + } + + @Override + public OrcidProfileDisconnectionMode getDisconnectionMode() { + String value = configurationService.getProperty("orcid.disconnection.allowed-users"); + if (!OrcidProfileDisconnectionMode.isValid(value)) { + return OrcidProfileDisconnectionMode.DISABLED; + } + return OrcidProfileDisconnectionMode.fromString(value); + } + + private void setAccessToken(Context context, Item profile, EPerson ePerson, String accessToken) { + OrcidToken orcidToken = orcidTokenService.findByEPerson(context, ePerson); + if (orcidToken == null) { + orcidTokenService.create(context, ePerson, profile, accessToken); + } else { + orcidToken.setProfileItem(profile); + orcidToken.setAccessToken(accessToken); + } + } + + private boolean updatePreferenceForSynchronizingWithOrcid(Context context, Item profile, + String metadataQualifier, + List values) throws SQLException { + + if (!isLinkedToOrcid(context, profile)) { + throw new IllegalArgumentException("The given profile cannot be configured for the ORCID " + + "synchronization because it is not linked to any ORCID account: " + + profile.getID()); + } + + List oldValues = itemService.getMetadata(profile, "dspace", "orcid", metadataQualifier, ANY).stream() + .map(metadataValue -> 
metadataValue.getValue()) + .collect(Collectors.toList()); + + if (containsSameValues(oldValues, values)) { + return false; + } + + itemService.clearMetadata(context, profile, "dspace", "orcid", metadataQualifier, ANY); + for (String value : values) { + itemService.addMetadata(context, profile, "dspace", "orcid", metadataQualifier, null, value); + } + + return true; + + } + + private boolean containsSameValues(List firstList, List secondList) { + return new HashSet<>(firstList).equals(new HashSet<>(secondList)); + } + + private Optional getOrcidAccessToken(Context context, Item item) { + return ofNullable(orcidTokenService.findByProfileItem(context, item)) + .map(orcidToken -> orcidToken.getAccessToken()); + } + + public Optional getOrcid(Item item) { + return getMetadataValue(item, "person.identifier.orcid") + .map(metadataValue -> metadataValue.getValue()); + } + + private Optional getMetadataValue(Item item, String metadataField) { + return getMetadataValues(item, metadataField).findFirst(); + } + + private Stream getMetadataValues(Item item, String metadataField) { + return item.getMetadata().stream() + .filter(metadata -> metadataField.equals(metadata.getMetadataField().toString('.'))); + } + + + private boolean isOrcidSynchronizationDisabled() { + return !configurationService.getBooleanProperty("orcid.synchronization-enabled", true); + } + + private void updateItem(Context context, Item item) throws SQLException { + try { + context.turnOffAuthorisationSystem(); + itemService.update(context, item); + } catch (AuthorizeException e) { + throw new RuntimeException(e); + } finally { + context.restoreAuthSystemState(); + } + } + + private void updateEPerson(Context context, EPerson ePerson) throws SQLException { + try { + ePersonService.update(context, ePerson); + } catch (AuthorizeException e) { + throw new RuntimeException(e); + } + } + + @Override + public List findProfilesByOrcid(Context context, String orcid) { + DiscoverQuery discoverQuery = new 
DiscoverQuery(); + discoverQuery.setDSpaceObjectFilter(IndexableItem.TYPE); + discoverQuery.addFilterQueries("search.entitytype:" + researcherProfileService.getProfileType()); + discoverQuery.addFilterQueries("person.identifier.orcid:" + orcid); + try { + return searchService.search(context, discoverQuery).getIndexableObjects().stream() + .map(object -> ((IndexableItem) object).getIndexedObject()) + .collect(Collectors.toList()); + } catch (SearchServiceException ex) { + throw new RuntimeException(ex); + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidTokenServiceImpl.java b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidTokenServiceImpl.java new file mode 100644 index 0000000000..bf10ea981c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidTokenServiceImpl.java @@ -0,0 +1,99 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service.impl; + +import java.sql.SQLException; +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.orcid.OrcidToken; +import org.dspace.orcid.dao.OrcidTokenDAO; +import org.dspace.orcid.service.OrcidTokenService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link OrcidTokenService}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidTokenServiceImpl implements OrcidTokenService { + + @Autowired + private OrcidTokenDAO orcidTokenDAO; + + @Override + public OrcidToken create(Context context, EPerson ePerson, String accessToken) { + return create(context, ePerson, null, accessToken); + } + + @Override + public OrcidToken create(Context context, EPerson ePerson, Item profileItem, String accessToken) { + OrcidToken orcidToken = new OrcidToken(); + orcidToken.setAccessToken(accessToken); + orcidToken.setEPerson(ePerson); + orcidToken.setProfileItem(profileItem); + try { + return orcidTokenDAO.create(context, orcidToken); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @Override + public OrcidToken findByEPerson(Context context, EPerson ePerson) { + return orcidTokenDAO.findByEPerson(context, ePerson); + } + + @Override + public OrcidToken findByProfileItem(Context context, Item profileItem) { + return orcidTokenDAO.findByProfileItem(context, profileItem); + } + + @Override + public void delete(Context context, OrcidToken orcidToken) { + try { + orcidTokenDAO.delete(context, orcidToken); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @Override + public void deleteAll(Context context) { + try { + + List tokens = orcidTokenDAO.findAll(context, OrcidToken.class); + for (OrcidToken token : tokens) { + delete(context, token); + } + + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @Override + public void deleteByEPerson(Context context, EPerson ePerson) { + OrcidToken orcidToken = findByEPerson(context, ePerson); + if (orcidToken != null) { + delete(context, orcidToken); + } + } + + @Override + public void deleteByProfileItem(Context context, Item profileItem) { + OrcidToken orcidToken = findByProfileItem(context, profileItem); + if (orcidToken != null) { + delete(context, orcidToken); + } + } + +} diff --git 
a/dspace-api/src/main/java/org/dspace/orcid/service/impl/PlainMetadataSignatureGeneratorImpl.java b/dspace-api/src/main/java/org/dspace/orcid/service/impl/PlainMetadataSignatureGeneratorImpl.java new file mode 100644 index 0000000000..4888462ce4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/impl/PlainMetadataSignatureGeneratorImpl.java @@ -0,0 +1,94 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service.impl; + +import static java.util.Comparator.comparing; + +import java.util.Arrays; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.core.Context; +import org.dspace.orcid.service.MetadataSignatureGenerator; + +/** + * Implementation of {@link MetadataSignatureGenerator} that composes a + * signature made up of a section for each metadata value, divided by the + * character SIGNATURE_SECTIONS_SEPARATOR.
    + * Each section is composed of the metadata field, the metadata value and, if + * present, the authority, divided by the character METADATA_SECTIONS_SEPARATOR. + *
    + * The presence of the metadata field allows to have different signatures for + * metadata with the same values but referring to different fields, while the + * authority allows to distinguish metadata that refer to different entities, + * even if they have the same value. Finally, the various sections of the + * signature are sorted by metadata field so that the order of the input + * metadata values does not affect the signature. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class PlainMetadataSignatureGeneratorImpl implements MetadataSignatureGenerator { + + private static final String SIGNATURE_SECTIONS_SEPARATOR = "§§"; + private static final String METADATA_SECTIONS_SEPARATOR = "::"; + + @Override + public String generate(Context context, List metadataValues) { + return metadataValues.stream() + .sorted(comparing(metadataValue -> metadataValue.getMetadataField().getID())) + .map(this::composeSignatureSection) + .collect(Collectors.joining(SIGNATURE_SECTIONS_SEPARATOR)); + } + + @Override + public List findBySignature(Context context, Item item, String signature) { + return getSignatureSections(signature) + .map(signatureSection -> findFirstBySignatureSection(context, item, signatureSection)) + .flatMap(metadataValue -> metadataValue.stream()) + .collect(Collectors.toList()); + } + + private String composeSignatureSection(MetadataValue metadataValue) { + String fieldId = getField(metadataValue); + String metadataValueSignature = fieldId + METADATA_SECTIONS_SEPARATOR + getValue(metadataValue); + if (StringUtils.isNotBlank(metadataValue.getAuthority())) { + return metadataValueSignature + METADATA_SECTIONS_SEPARATOR + metadataValue.getAuthority(); + } else { + return metadataValueSignature; + } + } + + private Optional findFirstBySignatureSection(Context context, Item item, String signatureSection) { + return item.getMetadata().stream() + .filter(metadataValue -> matchSignature(context, metadataValue, signatureSection)) 
+ .findFirst(); + } + + private boolean matchSignature(Context context, MetadataValue metadataValue, String signatureSection) { + return generate(context, List.of(metadataValue)).equals(signatureSection); + } + + private Stream getSignatureSections(String signature) { + return Arrays.stream(StringUtils.split(signature, SIGNATURE_SECTIONS_SEPARATOR)); + } + + private String getField(MetadataValue metadataValue) { + return metadataValue.getMetadataField().toString('.'); + } + + private String getValue(MetadataValue metadataValue) { + return metadataValue.getValue() != null ? metadataValue.getValue() : ""; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/passwordvalidation/factory/PasswordValidationFactory.java b/dspace-api/src/main/java/org/dspace/passwordvalidation/factory/PasswordValidationFactory.java new file mode 100644 index 0000000000..81cebb84a1 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/passwordvalidation/factory/PasswordValidationFactory.java @@ -0,0 +1,29 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.passwordvalidation.factory; + +import org.dspace.authorize.service.PasswordValidatorService; +import org.dspace.services.factory.DSpaceServicesFactory; + +/** + * Abstract factory to get services for the passwordvalidation package, + * use PasswordValidationFactory.getInstance() to retrieve an implementation. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public abstract class PasswordValidationFactory { + + public abstract PasswordValidatorService getPasswordValidationService(); + + public static PasswordValidationFactory getInstance() { + return DSpaceServicesFactory.getInstance() + .getServiceManager() + .getServiceByName("validationPasswordFactory", PasswordValidationFactory.class); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/passwordvalidation/factory/PasswordValidationFactoryImpl.java b/dspace-api/src/main/java/org/dspace/passwordvalidation/factory/PasswordValidationFactoryImpl.java new file mode 100644 index 0000000000..a73c7f6868 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/passwordvalidation/factory/PasswordValidationFactoryImpl.java @@ -0,0 +1,29 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.passwordvalidation.factory; + +import org.dspace.authorize.service.PasswordValidatorService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Factory implementation to get services for the PasswordValidation package, + * use PasswordValidationFactory.getInstance() to retrieve an implementation. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class PasswordValidationFactoryImpl extends PasswordValidationFactory { + + @Autowired(required = true) + private PasswordValidatorService PasswordValidatorService; + + @Override + public PasswordValidatorService getPasswordValidationService() { + return PasswordValidatorService; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/profile/OrcidEntitySyncPreference.java b/dspace-api/src/main/java/org/dspace/profile/OrcidEntitySyncPreference.java new file mode 100644 index 0000000000..74efc57e38 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/profile/OrcidEntitySyncPreference.java @@ -0,0 +1,30 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.profile; + +/** + * Enum that model the allowed values to configure the ORCID synchronization + * preferences. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public enum OrcidEntitySyncPreference { + + /** + * Preference to be set to disable the synchronization with ORCID of the + * specific entity. + */ + DISABLED, + + /** + * Preference to be set to enable the synchronization with ORCID of all items + * relating to the specific entity. 
+ */ + ALL +} diff --git a/dspace-api/src/main/java/org/dspace/profile/OrcidMetadataCopyingAction.java b/dspace-api/src/main/java/org/dspace/profile/OrcidMetadataCopyingAction.java new file mode 100644 index 0000000000..36abea9ddb --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/profile/OrcidMetadataCopyingAction.java @@ -0,0 +1,97 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.profile; + +import static java.time.LocalDateTime.now; +import static java.time.format.DateTimeFormatter.ISO_DATE_TIME; +import static org.apache.commons.collections.CollectionUtils.isNotEmpty; +import static org.dspace.content.Item.ANY; + +import java.sql.SQLException; +import java.util.List; +import java.util.stream.Collectors; + +import org.apache.commons.collections.CollectionUtils; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.service.EPersonService; +import org.dspace.orcid.OrcidToken; +import org.dspace.orcid.service.OrcidTokenService; +import org.dspace.profile.service.AfterResearcherProfileCreationAction; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.Ordered; +import org.springframework.core.annotation.Order; + +/** + * Implementation of {@link AfterResearcherProfileCreationAction} that copy the + * ORCID metadata, if any, from the owner to the researcher profile item. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@Order(Ordered.HIGHEST_PRECEDENCE) +public class OrcidMetadataCopyingAction implements AfterResearcherProfileCreationAction { + + @Autowired + private ItemService itemService; + + @Autowired + private EPersonService ePersonService; + + @Autowired + private OrcidTokenService orcidTokenService; + + @Override + public void perform(Context context, ResearcherProfile researcherProfile, EPerson owner) throws SQLException { + + Item item = researcherProfile.getItem(); + + copyMetadataValues(context, owner, "eperson.orcid", item, "person.identifier.orcid"); + copyMetadataValues(context, owner, "eperson.orcid.scope", item, "dspace.orcid.scope"); + + OrcidToken orcidToken = orcidTokenService.findByEPerson(context, owner); + if (orcidToken != null) { + orcidToken.setProfileItem(item); + } + + if (isLinkedToOrcid(owner, orcidToken)) { + String currentDate = ISO_DATE_TIME.format(now()); + itemService.setMetadataSingleValue(context, item, "dspace", "orcid", "authenticated", null, currentDate); + } + + } + + private void copyMetadataValues(Context context, EPerson ePerson, String ePersonMetadataField, Item item, + String itemMetadataField) throws SQLException { + + List values = getMetadataValues(ePerson, ePersonMetadataField); + if (CollectionUtils.isEmpty(values)) { + return; + } + + MetadataFieldName metadata = new MetadataFieldName(itemMetadataField); + itemService.clearMetadata(context, item, metadata.schema, metadata.element, metadata.qualifier, ANY); + itemService.addMetadata(context, item, metadata.schema, metadata.element, metadata.qualifier, null, values); + + } + + private boolean isLinkedToOrcid(EPerson ePerson, OrcidToken orcidToken) { + return isNotEmpty(getMetadataValues(ePerson, "eperson.orcid")) && orcidToken != null; + } + + private List getMetadataValues(EPerson ePerson, String metadataField) { + return ePersonService.getMetadataByMetadataString(ePerson, metadataField).stream() + 
.map(MetadataValue::getValue) + .collect(Collectors.toList()); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/profile/OrcidProfileDisconnectionMode.java b/dspace-api/src/main/java/org/dspace/profile/OrcidProfileDisconnectionMode.java new file mode 100644 index 0000000000..22b13f047c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/profile/OrcidProfileDisconnectionMode.java @@ -0,0 +1,49 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.profile; + +import static org.apache.commons.lang3.EnumUtils.isValidEnum; + +/** + * Enum that models all the available values of the property that which + * determines which users can disconnect a profile from an ORCID account. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public enum OrcidProfileDisconnectionMode { + + /** + * The disconnection is disabled. + */ + DISABLED, + + /** + * Only the profile's owner can disconnect that profile from ORCID. + */ + ONLY_OWNER, + + /** + * Only the admins can disconnect profiles from ORCID. + */ + ONLY_ADMIN, + + /** + * Only the admin or the profile's owner can disconnect that profile from ORCID. + */ + ADMIN_AND_OWNER; + + public static boolean isValid(String mode) { + return mode != null ? isValidEnum(OrcidProfileDisconnectionMode.class, mode.toUpperCase()) : false; + } + + public static OrcidProfileDisconnectionMode fromString(String mode) { + return isValid(mode) ? 
OrcidProfileDisconnectionMode.valueOf(mode.toUpperCase()) : null; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/profile/OrcidProfileSyncPreference.java b/dspace-api/src/main/java/org/dspace/profile/OrcidProfileSyncPreference.java new file mode 100644 index 0000000000..a867694490 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/profile/OrcidProfileSyncPreference.java @@ -0,0 +1,29 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.profile; + +/** + * Enum that model the allowed values to configure the ORCID synchronization + * preferences for the user's profile. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public enum OrcidProfileSyncPreference { + + /** + * Data relating to the name, country and keywords of the ORCID profile. + */ + BIOGRAPHICAL, + + /** + * Data relating to external identifiers and researcher urls of the ORCID + * profile. + */ + IDENTIFIERS; +} diff --git a/dspace-api/src/main/java/org/dspace/profile/OrcidSynchronizationMode.java b/dspace-api/src/main/java/org/dspace/profile/OrcidSynchronizationMode.java new file mode 100644 index 0000000000..8bc822261b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/profile/OrcidSynchronizationMode.java @@ -0,0 +1,29 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.profile; + +/** + * Enum that model the allowed values to configure the ORCID synchronization + * mode. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public enum OrcidSynchronizationMode { + + /** + * Mode in which the user can manually decide when to synchronize data with + * ORCID. + */ + MANUAL, + + /** + * Mode in which synchronizations with ORCID occur through an automatic process. + */ + BATCH; +} diff --git a/dspace-api/src/main/java/org/dspace/profile/ResearcherProfile.java b/dspace-api/src/main/java/org/dspace/profile/ResearcherProfile.java new file mode 100644 index 0000000000..72e7dc8008 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/profile/ResearcherProfile.java @@ -0,0 +1,88 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.profile; + +import static org.dspace.core.Constants.READ; +import static org.dspace.eperson.Group.ANONYMOUS; + +import java.util.Optional; +import java.util.UUID; +import java.util.stream.Stream; + +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.util.UUIDUtils; +import org.springframework.util.Assert; + +/** + * Object representing a Researcher Profile. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ResearcherProfile { + + private final Item item; + + private final MetadataValue dspaceObjectOwner; + + /** + * Create a new ResearcherProfile object from the given item. 
+ * + * @param item the profile item + * @throws IllegalArgumentException if the given item has not a dspace.object.owner + * metadata with a valid authority + */ + public ResearcherProfile(Item item) { + Assert.notNull(item, "A researcher profile requires an item"); + this.item = item; + this.dspaceObjectOwner = getDspaceObjectOwnerMetadata(item); + } + + public UUID getId() { + return UUIDUtils.fromString(dspaceObjectOwner.getAuthority()); + } + + /** + * A profile is considered visible if accessible by anonymous users. This method + * returns true if the given item has a READ policy related to ANONYMOUS group, + * false otherwise. + */ + public boolean isVisible() { + return item.getResourcePolicies().stream() + .filter(policy -> policy.getGroup() != null) + .anyMatch(policy -> READ == policy.getAction() && ANONYMOUS.equals(policy.getGroup().getName())); + } + + public Item getItem() { + return item; + } + + public Optional getOrcid() { + return getMetadataValue(item, "person.identifier.orcid") + .map(metadataValue -> metadataValue.getValue()); + } + + private MetadataValue getDspaceObjectOwnerMetadata(Item item) { + return getMetadataValue(item, "dspace.object.owner") + .filter(metadata -> UUIDUtils.fromString(metadata.getAuthority()) != null) + .orElseThrow( + () -> new IllegalArgumentException("A profile item must have a valid dspace.object.owner metadata") + ); + } + + private Optional getMetadataValue(Item item, String metadataField) { + return getMetadataValues(item, metadataField).findFirst(); + } + + private Stream getMetadataValues(Item item, String metadataField) { + return item.getMetadata().stream() + .filter(metadata -> metadataField.equals(metadata.getMetadataField().toString('.'))); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/profile/ResearcherProfileServiceImpl.java b/dspace-api/src/main/java/org/dspace/profile/ResearcherProfileServiceImpl.java new file mode 100644 index 0000000000..80bbd68fd1 --- /dev/null +++ 
b/dspace-api/src/main/java/org/dspace/profile/ResearcherProfileServiceImpl.java @@ -0,0 +1,388 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.profile; + +import static java.util.Optional.empty; +import static java.util.Optional.of; +import static java.util.Optional.ofNullable; +import static org.dspace.content.authority.Choices.CF_ACCEPTED; +import static org.dspace.core.Constants.READ; +import static org.dspace.core.Constants.WRITE; +import static org.dspace.eperson.Group.ANONYMOUS; + +import java.io.IOException; +import java.net.URI; +import java.sql.SQLException; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; +import java.util.UUID; +import javax.annotation.PostConstruct; + +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang.StringUtils; +import org.dspace.app.exception.ResourceAlreadyExistsException; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.WorkspaceItem; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.InstallItemService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.WorkspaceItemService; +import org.dspace.core.Context; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.DiscoverResult; +import org.dspace.discovery.IndexableObject; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.discovery.indexobject.IndexableCollection; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import 
org.dspace.eperson.service.GroupService; +import org.dspace.orcid.service.OrcidSynchronizationService; +import org.dspace.profile.service.AfterResearcherProfileCreationAction; +import org.dspace.profile.service.ResearcherProfileService; +import org.dspace.services.ConfigurationService; +import org.dspace.util.UUIDUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.util.Assert; + +/** + * Implementation of {@link ResearcherProfileService}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ResearcherProfileServiceImpl implements ResearcherProfileService { + + private static Logger log = LoggerFactory.getLogger(ResearcherProfileServiceImpl.class); + + @Autowired + private ItemService itemService; + + @Autowired + private WorkspaceItemService workspaceItemService; + + @Autowired + private InstallItemService installItemService; + + @Autowired + private ConfigurationService configurationService; + + @Autowired + private CollectionService collectionService; + + @Autowired + private SearchService searchService; + + @Autowired + private GroupService groupService; + + @Autowired + private AuthorizeService authorizeService; + + @Autowired + private OrcidSynchronizationService orcidSynchronizationService; + + @Autowired(required = false) + private List afterCreationActions; + + @PostConstruct + public void postConstruct() { + + if (afterCreationActions == null) { + afterCreationActions = Collections.emptyList(); + } + + } + + @Override + public ResearcherProfile findById(Context context, UUID id) throws SQLException, AuthorizeException { + Assert.notNull(id, "An id must be provided to find a researcher profile"); + + Item profileItem = findResearcherProfileItemById(context, id); + if (profileItem == null) { + return null; + } + + return new ResearcherProfile(profileItem); + } + + @Override + public ResearcherProfile 
createAndReturn(Context context, EPerson ePerson) + throws AuthorizeException, SQLException, SearchServiceException { + + Item profileItem = findResearcherProfileItemById(context, ePerson.getID()); + if (profileItem != null) { + throw new ResourceAlreadyExistsException("A profile is already linked to the provided User"); + } + + Collection collection = findProfileCollection(context) + .orElseThrow(() -> new IllegalStateException("No collection found for researcher profiles")); + + context.turnOffAuthorisationSystem(); + Item item = createProfileItem(context, ePerson, collection); + context.restoreAuthSystemState(); + + ResearcherProfile researcherProfile = new ResearcherProfile(item); + + for (AfterResearcherProfileCreationAction afterCreationAction : afterCreationActions) { + afterCreationAction.perform(context, researcherProfile, ePerson); + } + + return researcherProfile; + } + + @Override + public void deleteById(Context context, UUID id) throws SQLException, AuthorizeException { + Assert.notNull(id, "An id must be provided to find a researcher profile"); + + Item profileItem = findResearcherProfileItemById(context, id); + if (profileItem == null) { + return; + } + + if (isHardDeleteEnabled()) { + deleteItem(context, profileItem); + } else { + removeOwnerMetadata(context, profileItem); + orcidSynchronizationService.unlinkProfile(context, profileItem); + } + + } + + @Override + public void changeVisibility(Context context, ResearcherProfile profile, boolean visible) + throws AuthorizeException, SQLException { + + if (profile.isVisible() == visible) { + return; + } + + Item item = profile.getItem(); + Group anonymous = groupService.findByName(context, ANONYMOUS); + + if (visible) { + authorizeService.addPolicy(context, item, READ, anonymous); + } else { + authorizeService.removeGroupPolicies(context, item, anonymous); + } + + } + + @Override + public ResearcherProfile claim(Context context, EPerson ePerson, URI uri) + throws SQLException, AuthorizeException, 
SearchServiceException { + + Item profileItem = findResearcherProfileItemById(context, ePerson.getID()); + if (profileItem != null) { + throw new ResourceAlreadyExistsException("A profile is already linked to the provided User"); + } + + Item item = findItemByURI(context, uri) + .orElseThrow(() -> new IllegalArgumentException("No item found by URI " + uri)); + + if (!item.isArchived() || item.isWithdrawn()) { + throw new IllegalArgumentException( + "Only archived items can be claimed to create a researcher profile. Item ID: " + item.getID()); + } + + if (!hasProfileType(item)) { + throw new IllegalArgumentException("The provided item has not a profile type. Item ID: " + item.getID()); + } + + if (haveDifferentEmail(item, ePerson)) { + throw new IllegalArgumentException("The provided item is not claimable because it has a different email " + + "than the given user's email. Item ID: " + item.getID()); + } + + String existingOwner = itemService.getMetadataFirstValue(item, "dspace", "object", "owner", Item.ANY); + + if (StringUtils.isNotBlank(existingOwner)) { + throw new IllegalArgumentException("Item with provided uri has already an owner - ID: " + existingOwner); + } + + context.turnOffAuthorisationSystem(); + itemService.addMetadata(context, item, "dspace", "object", "owner", null, + ePerson.getName(), ePerson.getID().toString(), CF_ACCEPTED); + context.restoreAuthSystemState(); + + return new ResearcherProfile(item); + } + + @Override + public boolean hasProfileType(Item item) { + String profileType = getProfileType(); + if (StringUtils.isBlank(profileType)) { + return false; + } + return profileType.equals(itemService.getEntityTypeLabel(item)); + } + + @Override + public String getProfileType() { + return configurationService.getProperty("researcher-profile.entity-type", "Person"); + } + + private Optional findItemByURI(final Context context, final URI uri) throws SQLException { + String path = uri.getPath(); + UUID uuid = 
UUIDUtils.fromString(path.substring(path.lastIndexOf("/") + 1)); + return ofNullable(itemService.find(context, uuid)); + } + + /** + * Search for a profile item owned by an eperson with the given id. + */ + private Item findResearcherProfileItemById(Context context, UUID id) throws SQLException, AuthorizeException { + + String profileType = getProfileType(); + + Iterator items = itemService.findByAuthorityValue(context, "dspace", "object", "owner", id.toString()); + while (items.hasNext()) { + Item item = items.next(); + String entityType = itemService.getEntityTypeLabel(item); + if (profileType.equals(entityType)) { + return item; + } + } + + return null; + } + + /** + * Returns a Profile collection based on a configuration or searching for a + * collection of researcher profile type. + */ + private Optional findProfileCollection(Context context) throws SQLException, SearchServiceException { + return findConfiguredProfileCollection(context) + .or(() -> findFirstCollectionByProfileEntityType(context)); + } + + /** + * Create a new profile item for the given ePerson in the provided collection. 
+ */ + private Item createProfileItem(Context context, EPerson ePerson, Collection collection) + throws AuthorizeException, SQLException { + + String id = ePerson.getID().toString(); + String fullName = ePerson.getFullName(); + + WorkspaceItem workspaceItem = workspaceItemService.create(context, collection, true); + Item item = workspaceItem.getItem(); + itemService.addMetadata(context, item, "dc", "title", null, null, fullName); + itemService.addMetadata(context, item, "person", "email", null, null, ePerson.getEmail()); + itemService.addMetadata(context, item, "dspace", "object", "owner", null, fullName, id, CF_ACCEPTED); + + item = installItemService.installItem(context, workspaceItem); + + if (isNewProfileNotVisibleByDefault()) { + Group anonymous = groupService.findByName(context, ANONYMOUS); + authorizeService.removeGroupPolicies(context, item, anonymous); + } + + authorizeService.addPolicy(context, item, READ, ePerson); + authorizeService.addPolicy(context, item, WRITE, ePerson); + + return reloadItem(context, item); + } + + private Optional findConfiguredProfileCollection(Context context) throws SQLException { + UUID uuid = UUIDUtils.fromString(configurationService.getProperty("researcher-profile.collection.uuid")); + if (uuid == null) { + return Optional.empty(); + } + + Collection collection = collectionService.find(context, uuid); + if (collection == null) { + return Optional.empty(); + } + + if (isNotProfileCollection(collection)) { + log.warn("The configured researcher-profile.collection.uuid " + + "has an invalid entity type, expected " + getProfileType()); + return Optional.empty(); + } + + return of(collection); + } + + @SuppressWarnings("rawtypes") + private Optional findFirstCollectionByProfileEntityType(Context context) { + + String profileType = getProfileType(); + + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE); + discoverQuery.addFilterQueries("dspace.entity.type:" + 
profileType); + + DiscoverResult discoverResult = search(context, discoverQuery); + List indexableObjects = discoverResult.getIndexableObjects(); + + if (CollectionUtils.isEmpty(indexableObjects)) { + return empty(); + } + + return ofNullable((Collection) indexableObjects.get(0).getIndexedObject()); + } + + private boolean isHardDeleteEnabled() { + return configurationService.getBooleanProperty("researcher-profile.hard-delete.enabled"); + } + + private boolean isNewProfileNotVisibleByDefault() { + return !configurationService.getBooleanProperty("researcher-profile.set-new-profile-visible"); + } + + private boolean isNotProfileCollection(Collection collection) { + String entityType = collectionService.getMetadataFirstValue(collection, "dspace", "entity", "type", Item.ANY); + return entityType == null || !entityType.equals(getProfileType()); + } + + private boolean haveDifferentEmail(Item item, EPerson currentUser) { + return itemService.getMetadataByMetadataString(item, "person.email").stream() + .map(MetadataValue::getValue) + .filter(StringUtils::isNotBlank) + .noneMatch(email -> email.equalsIgnoreCase(currentUser.getEmail())); + } + + private void removeOwnerMetadata(Context context, Item profileItem) throws SQLException { + List metadata = itemService.getMetadata(profileItem, "dspace", "object", "owner", Item.ANY); + itemService.removeMetadataValues(context, profileItem, metadata); + } + + private Item reloadItem(Context context, Item item) throws SQLException { + context.uncacheEntity(item); + return context.reloadEntity(item); + } + + private void deleteItem(Context context, Item profileItem) throws SQLException, AuthorizeException { + try { + context.turnOffAuthorisationSystem(); + itemService.delete(context, profileItem); + } catch (IOException e) { + throw new RuntimeException(e); + } finally { + context.restoreAuthSystemState(); + } + } + + private DiscoverResult search(Context context, DiscoverQuery discoverQuery) { + try { + return 
searchService.search(context, discoverQuery); + } catch (SearchServiceException e) { + throw new RuntimeException(e); + } + } + +} diff --git a/dspace-api/src/main/java/org/dspace/profile/service/AfterResearcherProfileCreationAction.java b/dspace-api/src/main/java/org/dspace/profile/service/AfterResearcherProfileCreationAction.java new file mode 100644 index 0000000000..495fe59cdc --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/profile/service/AfterResearcherProfileCreationAction.java @@ -0,0 +1,35 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.profile.service; + +import java.sql.SQLException; + +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.profile.ResearcherProfile; + +/** + * Interface to mark classes that allow to perform additional logic on created + * researcher profile. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface AfterResearcherProfileCreationAction { + + /** + * Perform some actions on the given researcher profile and returns the updated + * profile. 
+ * + * @param context the DSpace context + * @param researcherProfile the created researcher profile + * @param owner the EPerson that is owner of the given profile + * @throws SQLException if a SQL error occurs + */ + void perform(Context context, ResearcherProfile researcherProfile, EPerson owner) throws SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/profile/service/ResearcherProfileService.java b/dspace-api/src/main/java/org/dspace/profile/service/ResearcherProfileService.java new file mode 100644 index 0000000000..9e52402f77 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/profile/service/ResearcherProfileService.java @@ -0,0 +1,112 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.profile.service; + +import java.net.URI; +import java.sql.SQLException; +import java.util.UUID; + +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.discovery.SearchServiceException; +import org.dspace.eperson.EPerson; +import org.dspace.profile.ResearcherProfile; + +/** + * Service interface class for the {@link ResearcherProfile} object. The + * implementation of this class is responsible for all business logic calls for + * the {@link ResearcherProfile} object. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface ResearcherProfileService { + + /** + * Find the ResearcherProfile by UUID. + * + * @param context the relevant DSpace Context. 
+ * @param id the ResearcherProfile id + * @return the found ResearcherProfile + * @throws SQLException + * @throws AuthorizeException + */ + public ResearcherProfile findById(Context context, UUID id) throws SQLException, AuthorizeException; + + /** + * Create a new researcher profile for the given ePerson. + * + * @param context the relevant DSpace Context. + * @param ePerson the ePerson + * @return the created profile + * @throws SQLException + * @throws AuthorizeException + * @throws SearchServiceException + */ + public ResearcherProfile createAndReturn(Context context, EPerson ePerson) + throws AuthorizeException, SQLException, SearchServiceException; + + /** + * Delete the profile with the given id. Based on the + * researcher-profile.hard-delete.enabled configuration, this method deletes the + * related item or removes the association between the researcher profile and + * eperson related to the input uuid. + * + * @param context the relevant DSpace Context. + * @param id the researcher profile id + * @throws AuthorizeException + * @throws SQLException + */ + public void deleteById(Context context, UUID id) throws SQLException, AuthorizeException; + + /** + * Changes the visibility of the given profile using the given new visible + * value. The visiblity controls whether the Profile is Anonymous READ or not. + * + * @param context the relevant DSpace Context. + * @param profile the researcher profile to update + * @param visible the visible value to set. If true the profile will + * be visible to all users. + * @throws SQLException + * @throws AuthorizeException + */ + public void changeVisibility(Context context, ResearcherProfile profile, boolean visible) + throws AuthorizeException, SQLException; + + /** + * Claims and links an eperson to an existing DSpaceObject + * @param context the relevant DSpace Context. 
+ * @param ePerson the ePerson + * @param uri uri of existing Item to be linked to the + * eperson + * @return the created profile + * @throws IllegalArgumentException if the given uri is not related to an + * archived item or if the item cannot be + * claimed + */ + ResearcherProfile claim(Context context, EPerson ePerson, URI uri) + throws SQLException, AuthorizeException, SearchServiceException; + + /** + * Check if the given item has an entity type compatible with that of the + * researcher profile. If the given item does not have an entity type, the check + * returns false. + * + * @param item the item to check + * @return the check result + */ + boolean hasProfileType(Item item); + + /** + * Returns the profile entity type, if any. + * + * @return the profile type + */ + String getProfileType(); +} diff --git a/dspace-api/src/main/java/org/dspace/rdf/RDFConsumer.java b/dspace-api/src/main/java/org/dspace/rdf/RDFConsumer.java index 76ae0cd2d2..34ab572d1b 100644 --- a/dspace-api/src/main/java/org/dspace/rdf/RDFConsumer.java +++ b/dspace-api/src/main/java/org/dspace/rdf/RDFConsumer.java @@ -286,51 +286,54 @@ public class RDFConsumer implements Consumer { @Override public void end(Context ctx) throws Exception { log.debug("Started processing of queued events."); - // create a new context, to be sure to work as anonymous user - // we don't want to store private data in a triplestore with public - // SPARQL endpoint. 
- ctx = new Context(Context.Mode.READ_ONLY); - if (toDelete == null) { - log.debug("Deletion queue does not exists, creating empty queue."); - this.toDelete = new LinkedList<>(); - } - if (toConvert != null) { - log.debug("Starting conversion of DSpaceObjects."); + // store the context mode, set context read only for performance reasons, and restore the old mode + Context.Mode oldMode = ctx.getCurrentMode(); + try { + ctx.setMode(Context.Mode.READ_ONLY); + if (toDelete == null) { + log.debug("Deletion queue does not exists, creating empty queue."); + this.toDelete = new LinkedList<>(); + } + if (toConvert != null) { + log.debug("Starting conversion of DSpaceObjects."); + while (true) { + DSOIdentifier id; + try { + id = toConvert.removeFirst(); + } catch (NoSuchElementException ex) { + break; + } + + if (toDelete.contains(id)) { + log.debug("Skipping " + Constants.typeText[id.type] + " " + + id.id.toString() + " as it is marked for " + + "deletion as well."); + continue; + } + log.debug("Converting " + Constants.typeText[id.type] + " " + + id.id.toString() + "."); + convert(ctx, id); + } + log.debug("Conversion ended."); + } + log.debug("Starting to delete data from the triple store..."); while (true) { DSOIdentifier id; try { - id = toConvert.removeFirst(); + id = toDelete.removeFirst(); } catch (NoSuchElementException ex) { break; } - if (toDelete.contains(id)) { - log.debug("Skipping " + Constants.typeText[id.type] + " " - + id.id.toString() + " as it is marked for " - + "deletion as well."); - continue; - } - log.debug("Converting " + Constants.typeText[id.type] + " " + log.debug("Going to delete data from " + + Constants.typeText[id.type] + " " + id.id.toString() + "."); - convert(ctx, id); + delete(ctx, id); } - log.debug("Conversion ended."); + } finally { + // restore context mode + ctx.setMode(oldMode); } - log.debug("Starting to delete data from the triple store..."); - while (true) { - DSOIdentifier id; - try { - id = toDelete.removeFirst(); - } catch 
(NoSuchElementException ex) { - break; - } - - log.debug("Going to delete data from " + - Constants.typeText[id.type] + " " - + id.id.toString() + "."); - delete(ctx, id); - } - ctx.abort(); log.debug("Deletion finished."); } diff --git a/dspace-api/src/main/java/org/dspace/scripts/Process.java b/dspace-api/src/main/java/org/dspace/scripts/Process.java index b15fd0c84c..ce41f46bdf 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/Process.java +++ b/dspace-api/src/main/java/org/dspace/scripts/Process.java @@ -10,6 +10,7 @@ package org.dspace.scripts; import java.util.ArrayList; import java.util.Date; import java.util.List; +import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.EnumType; @@ -33,6 +34,7 @@ import org.dspace.content.Bitstream; import org.dspace.content.ProcessStatus; import org.dspace.core.ReloadableEntity; import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; /** * This class is the DB Entity representation of the Process object to be stored in the Database @@ -77,6 +79,17 @@ public class Process implements ReloadableEntity { ) private List bitstreams; + /* + * Special Groups associated with this Process + */ + @ManyToMany(fetch = FetchType.LAZY, cascade = {CascadeType.PERSIST}) + @JoinTable( + name = "process2group", + joinColumns = {@JoinColumn(name = "process_id")}, + inverseJoinColumns = {@JoinColumn(name = "group_id")} + ) + private List groups; + @Column(name = "creation_time", nullable = false) @Temporal(TemporalType.TIMESTAMP) private Date creationTime; @@ -211,6 +224,21 @@ public class Process implements ReloadableEntity { return creationTime; } + /** + * This method will return the special groups associated with the Process. + */ + public List getGroups() { + return groups; + } + + /** + * This method sets the special groups associated with the Process. + * @param groups The special groups of this process. 
+ */ + public void setGroups(List groups) { + this.groups = groups; + } + /** * Return true if other is the same Process * as this object, false otherwise diff --git a/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java b/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java index 8c03a9767d..33fea75add 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java @@ -21,6 +21,7 @@ import java.util.Comparator; import java.util.Date; import java.util.HashSet; import java.util.List; +import java.util.Optional; import java.util.Set; import java.util.regex.Pattern; @@ -43,6 +44,7 @@ import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.LogHelper; import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; import org.dspace.eperson.service.EPersonService; import org.dspace.scripts.service.ProcessService; import org.springframework.beans.factory.annotation.Autowired; @@ -74,13 +76,21 @@ public class ProcessServiceImpl implements ProcessService { @Override public Process create(Context context, EPerson ePerson, String scriptName, - List parameters) throws SQLException { + List parameters, + final Set specialGroups) throws SQLException { Process process = new Process(); process.setEPerson(ePerson); process.setName(scriptName); process.setParameters(DSpaceCommandLineParameter.concatenate(parameters)); process.setCreationTime(new Date()); + Optional.ofNullable(specialGroups) + .ifPresent(sg -> { + // we use a set to be sure no duplicated special groups are stored with process + Set specialGroupsSet = new HashSet<>(sg); + process.setGroups(new ArrayList<>(specialGroupsSet)); + }); + Process createdProcess = processDAO.create(context, process); log.info(LogHelper.getHeader(context, "process_create", "Process has been created for eperson with email " + ePerson.getEmail() @@ -295,6 +305,12 @@ public class 
ProcessServiceImpl implements ProcessService { tempFile.delete(); } + @Override + public List findByStatusAndCreationTimeOlderThan(Context context, List statuses, + Date date) throws SQLException { + return this.processDAO.findByStatusAndCreationTimeOlderThan(context, statuses, date); + } + private String formatLogLine(int processId, String scriptName, String output, ProcessLogLevel processLogLevel) { SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); StringBuilder sb = new StringBuilder(); diff --git a/dspace-api/src/main/java/org/dspace/scripts/handler/DSpaceRunnableHandler.java b/dspace-api/src/main/java/org/dspace/scripts/handler/DSpaceRunnableHandler.java index f1b37cade2..223a73dad7 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/handler/DSpaceRunnableHandler.java +++ b/dspace-api/src/main/java/org/dspace/scripts/handler/DSpaceRunnableHandler.java @@ -10,7 +10,9 @@ package org.dspace.scripts.handler; import java.io.IOException; import java.io.InputStream; import java.sql.SQLException; +import java.util.List; import java.util.Optional; +import java.util.UUID; import org.apache.commons.cli.Options; import org.dspace.authorize.AuthorizeException; @@ -114,4 +116,12 @@ public interface DSpaceRunnableHandler { */ public void writeFilestream(Context context, String fileName, InputStream inputStream, String type) throws IOException, SQLException, AuthorizeException; + + /** + * This method will return a List of UUIDs for the special groups + * associated with the processId contained by specific implementations of this interface. + * Otherwise, it returns an empty collection. + * @return List containing UUIDs of Special Groups of the associated Process. 
+ */ + public List getSpecialGroups(); } diff --git a/dspace-api/src/main/java/org/dspace/scripts/handler/impl/CommandLineDSpaceRunnableHandler.java b/dspace-api/src/main/java/org/dspace/scripts/handler/impl/CommandLineDSpaceRunnableHandler.java index 6a108728d4..8a7f41d958 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/handler/impl/CommandLineDSpaceRunnableHandler.java +++ b/dspace-api/src/main/java/org/dspace/scripts/handler/impl/CommandLineDSpaceRunnableHandler.java @@ -10,7 +10,10 @@ package org.dspace.scripts.handler.impl; import java.io.File; import java.io.IOException; import java.io.InputStream; +import java.util.Collections; +import java.util.List; import java.util.Optional; +import java.util.UUID; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Options; @@ -113,4 +116,9 @@ public class CommandLineDSpaceRunnableHandler implements DSpaceRunnableHandler { File file = new File(fileName); FileUtils.copyInputStreamToFile(inputStream, file); } + + @Override + public List getSpecialGroups() { + return Collections.emptyList(); + } } diff --git a/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java b/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java index 27c0c75a35..ce6a173b0e 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java +++ b/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java @@ -10,12 +10,16 @@ package org.dspace.scripts.service; import java.io.IOException; import java.io.InputStream; import java.sql.SQLException; +import java.util.Date; import java.util.List; +import java.util.Set; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Bitstream; +import org.dspace.content.ProcessStatus; import org.dspace.core.Context; import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.Process; import 
org.dspace.scripts.ProcessLogLevel; @@ -32,11 +36,14 @@ public interface ProcessService { * @param ePerson The ePerson for which this process will be created on * @param scriptName The script name to be used for the process * @param parameters The parameters to be used for the process + * @param specialGroups Allows to set special groups, associated with application context when process is created, + * other than the ones derived from the eperson membership. * @return The created process * @throws SQLException If something goes wrong */ public Process create(Context context, EPerson ePerson, String scriptName, - List parameters) throws SQLException; + List parameters, + final Set specialGroups) throws SQLException; /** * This method will retrieve a Process object from the Database with the given ID @@ -235,4 +242,17 @@ public interface ProcessService { */ void createLogBitstream(Context context, Process process) throws IOException, SQLException, AuthorizeException; + + /** + * Find all the processes with one of the given status and with a creation time + * older than the specified date. 
+ * + * @param context The relevant DSpace context + * @param statuses the statuses of the processes to search for + * @param date the creation date to search for + * @return The list of all Processes which match requirements + * @throws AuthorizeException If something goes wrong + */ + List findByStatusAndCreationTimeOlderThan(Context context, List statuses, Date date) + throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/service/impl/ClientInfoServiceImpl.java b/dspace-api/src/main/java/org/dspace/service/impl/ClientInfoServiceImpl.java index f63a7a4f91..e83aa93e33 100644 --- a/dspace-api/src/main/java/org/dspace/service/impl/ClientInfoServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/service/impl/ClientInfoServiceImpl.java @@ -7,8 +7,12 @@ */ package org.dspace.service.impl; +import static org.apache.commons.lang3.StringUtils.ordinalIndexOf; + +import java.net.Inet4Address; import javax.servlet.http.HttpServletRequest; +import com.google.common.net.InetAddresses; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.dspace.core.Utils; @@ -66,6 +70,13 @@ public class ClientInfoServiceImpl implements ClientInfoService { "To trust X-Forwarded-For headers, set useProxies=true."); } + if (isIPv4Address(ip)) { + int ipAnonymizationBytes = getIpAnonymizationBytes(); + if (ipAnonymizationBytes > 0) { + ip = anonymizeIpAddress(ip, ipAnonymizationBytes); + } + } + return ip; } @@ -139,7 +150,7 @@ public class ClientInfoServiceImpl implements ClientInfoService { // If our IPTable is not empty, log the trusted proxies and return it if (!ipTable.isEmpty()) { - log.info("Trusted proxies (configure via 'proxies.trusted.ipranges'): {}", ipTable.toSet().toString()); + log.info("Trusted proxies (configure via 'proxies.trusted.ipranges'): {}", ipTable); return ipTable; } else { return null; @@ -192,4 +203,38 @@ public class ClientInfoServiceImpl implements ClientInfoService { return ip; } + + /** + * 
Anonymize the given IP address by setting the last specified bytes to 0 + * @param ipAddress the ip address to be anonymize + * @param bytes the number of bytes to be set to 0 + * @return the modified ip address + */ + private String anonymizeIpAddress(String ipAddress, int bytes) { + + if (bytes > 4) { + log.warn("It is not possible to anonymize " + bytes + " bytes of an IPv4 address."); + return ipAddress; + } + + if (bytes == 4) { + return "0.0.0.0"; + } + + String zeroSuffix = StringUtils.repeat(".0", bytes); + return removeLastBytes(ipAddress, bytes) + zeroSuffix; + + } + + private String removeLastBytes(String ipAddress, int bytes) { + return ipAddress.substring(0, ordinalIndexOf(ipAddress, ".", 4 - bytes)); + } + + private int getIpAnonymizationBytes() { + return configurationService.getIntProperty("client.ip-anonymization.parts", 0); + } + + private boolean isIPv4Address(String ipAddress) { + return InetAddresses.forString(ipAddress) instanceof Inet4Address; + } } diff --git a/dspace-api/src/main/java/org/dspace/sort/OrderFormat.java b/dspace-api/src/main/java/org/dspace/sort/OrderFormat.java index 4b3e188662..fdaaef98b5 100644 --- a/dspace-api/src/main/java/org/dspace/sort/OrderFormat.java +++ b/dspace-api/src/main/java/org/dspace/sort/OrderFormat.java @@ -86,23 +86,23 @@ public class OrderFormat { } // No delegates found, so apply defaults - if (type.equalsIgnoreCase(OrderFormat.AUTHOR) && authorDelegate != null) { + if (type.equalsIgnoreCase(OrderFormat.AUTHOR)) { return authorDelegate.makeSortString(value, language); } - if (type.equalsIgnoreCase(OrderFormat.TITLE) && titleDelegate != null) { + if (type.equalsIgnoreCase(OrderFormat.TITLE)) { return titleDelegate.makeSortString(value, language); } - if (type.equalsIgnoreCase(OrderFormat.TEXT) && textDelegate != null) { + if (type.equalsIgnoreCase(OrderFormat.TEXT)) { return textDelegate.makeSortString(value, language); } - if (type.equalsIgnoreCase(OrderFormat.DATE) && dateDelegate != null) { + if 
(type.equalsIgnoreCase(OrderFormat.DATE)) { return dateDelegate.makeSortString(value, language); } - if (type.equalsIgnoreCase(OrderFormat.AUTHORITY) && authorityDelegate != null) { + if (type.equalsIgnoreCase(OrderFormat.AUTHORITY)) { return authorityDelegate.makeSortString(value, language); } } diff --git a/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitle.java b/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitle.java index eb3586dc61..b745f0719c 100644 --- a/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitle.java +++ b/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitle.java @@ -10,6 +10,7 @@ package org.dspace.sort; import org.dspace.text.filter.DecomposeDiactritics; import org.dspace.text.filter.LowerCaseAndTrim; import org.dspace.text.filter.StandardInitialArticleWord; +import org.dspace.text.filter.StripDiacritics; import org.dspace.text.filter.TextFilter; /** @@ -21,6 +22,7 @@ public class OrderFormatTitle extends AbstractTextFilterOFD { { filters = new TextFilter[] {new StandardInitialArticleWord(), new DecomposeDiactritics(), + new StripDiacritics(), new LowerCaseAndTrim()}; } } diff --git a/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitleMarc21.java b/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitleMarc21.java index 670e5c87e5..fa9ba29725 100644 --- a/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitleMarc21.java +++ b/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitleMarc21.java @@ -10,6 +10,7 @@ package org.dspace.sort; import org.dspace.text.filter.DecomposeDiactritics; import org.dspace.text.filter.LowerCaseAndTrim; import org.dspace.text.filter.MARC21InitialArticleWord; +import org.dspace.text.filter.StripDiacritics; import org.dspace.text.filter.StripLeadingNonAlphaNum; import org.dspace.text.filter.TextFilter; @@ -22,6 +23,7 @@ public class OrderFormatTitleMarc21 extends AbstractTextFilterOFD { { filters = new TextFilter[] {new MARC21InitialArticleWord(), new DecomposeDiactritics(), + new 
StripDiacritics(), new StripLeadingNonAlphaNum(), new LowerCaseAndTrim()}; } diff --git a/dspace-api/src/main/java/org/dspace/statistics/DataTermsFacet.java b/dspace-api/src/main/java/org/dspace/statistics/DataTermsFacet.java deleted file mode 100644 index 9de06b7bb8..0000000000 --- a/dspace-api/src/main/java/org/dspace/statistics/DataTermsFacet.java +++ /dev/null @@ -1,70 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.statistics; - -import java.util.ArrayList; -import java.util.List; - -import com.google.gson.Gson; - -/** - * A neutral data object to hold data for statistics. - */ -public class DataTermsFacet { - private List terms; - - public DataTermsFacet() { - terms = new ArrayList(); - } - - public void addTermFacet(TermsFacet termsFacet) { - terms.add(termsFacet); - } - - /** - * Render this data object into JSON format. - * - * An example of the output could be of the format: - * [{"term":"247166","count":10},{"term":"247168","count":6}] - * - * @return JSON-formatted data. 
- */ - public String toJson() { - Gson gson = new Gson(); - return gson.toJson(terms); - } - - - public static class TermsFacet { - private String term; - private Integer count; - - public TermsFacet(String term, Integer count) { - setTerm(term); - setCount(count); - } - - public String getTerm() { - return term; - } - - public void setTerm(String term) { - this.term = term; - } - - public Integer getCount() { - return count; - } - - public void setCount(Integer count) { - this.count = count; - } - - - } -} diff --git a/dspace-api/src/main/java/org/dspace/statistics/GeoIpService.java b/dspace-api/src/main/java/org/dspace/statistics/GeoIpService.java new file mode 100644 index 0000000000..7f8a11e5ba --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/statistics/GeoIpService.java @@ -0,0 +1,57 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; + +import com.maxmind.geoip2.DatabaseReader; +import org.apache.commons.lang3.StringUtils; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Service that handle the GeoIP database file. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class GeoIpService { + + @Autowired + private ConfigurationService configurationService; + + /** + * Returns an instance of {@link DatabaseReader} based on the configured db + * file, if any. 
+ * + * @return the Database reader + * @throws IllegalStateException if the db file is not configured correctly + */ + public DatabaseReader getDatabaseReader() throws IllegalStateException { + String dbPath = configurationService.getProperty("usage-statistics.dbfile"); + if (StringUtils.isBlank(dbPath)) { + throw new IllegalStateException("The required 'dbfile' configuration is missing in solr-statistics.cfg!"); + } + + try { + File dbFile = new File(dbPath); + return new DatabaseReader.Builder(dbFile).build(); + } catch (FileNotFoundException fe) { + throw new IllegalStateException( + "The GeoLite Database file is missing (" + dbPath + ")! Solr Statistics cannot generate location " + + "based reports! Please see the DSpace installation instructions for instructions to install " + + "this file.",fe); + } catch (IOException e) { + throw new IllegalStateException( + "Unable to load GeoLite Database file (" + dbPath + ")! You may need to reinstall it. See the " + + "DSpace installation instructions for more details.", e); + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java index 9cc032a998..a952520385 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java @@ -8,7 +8,6 @@ package org.dspace.statistics; import java.io.File; -import java.io.FileNotFoundException; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; @@ -142,6 +141,8 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea private ClientInfoService clientInfoService; @Autowired private SolrStatisticsCore solrStatisticsCore; + @Autowired + private GeoIpService geoIpService; /** URL to the current-year statistics core. Prior-year shards will have a year suffixed. 
*/ private String statisticsCoreURL; @@ -179,26 +180,10 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea //spiderIps = SpiderDetector.getSpiderIpAddresses(); DatabaseReader service = null; - // Get the db file for the location - String dbPath = configurationService.getProperty("usage-statistics.dbfile"); - if (dbPath != null) { - try { - File dbFile = new File(dbPath); - service = new DatabaseReader.Builder(dbFile).build(); - } catch (FileNotFoundException fe) { - log.error( - "The GeoLite Database file is missing (" + dbPath + ")! Solr Statistics cannot generate location " + - "based reports! Please see the DSpace installation instructions for instructions to install " + - "this file.", - fe); - } catch (IOException e) { - log.error( - "Unable to load GeoLite Database file (" + dbPath + ")! You may need to reinstall it. See the " + - "DSpace installation instructions for more details.", - e); - } - } else { - log.error("The required 'dbfile' configuration is missing in solr-statistics.cfg!"); + try { + service = geoIpService.getDatabaseReader(); + } catch (IllegalStateException ex) { + log.error(ex); } locationService = service; } @@ -212,7 +197,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea @Override public void postView(DSpaceObject dspaceObject, HttpServletRequest request, EPerson currentUser) { - if (solr == null || locationService == null) { + if (solr == null) { return; } initSolrYearCores(); @@ -253,7 +238,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea @Override public void postView(DSpaceObject dspaceObject, String ip, String userAgent, String xforwardedfor, EPerson currentUser) { - if (solr == null || locationService == null) { + if (solr == null) { return; } initSolrYearCores(); diff --git a/dspace-api/src/main/java/org/dspace/statistics/content/DatasetTimeGenerator.java 
b/dspace-api/src/main/java/org/dspace/statistics/content/DatasetTimeGenerator.java index 1152ee669c..a8ffbb4b40 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/content/DatasetTimeGenerator.java +++ b/dspace-api/src/main/java/org/dspace/statistics/content/DatasetTimeGenerator.java @@ -187,7 +187,7 @@ public class DatasetTimeGenerator extends DatasetGenerator { cal2.clear(Calendar.HOUR); cal1.clear(Calendar.HOUR_OF_DAY); cal2.clear(Calendar.HOUR_OF_DAY); - //yet i know calendar just won't clear his hours + //yet i know calendar just won't clear its hours cal1.set(Calendar.HOUR_OF_DAY, 0); cal2.set(Calendar.HOUR_OF_DAY, 0); } diff --git a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataVisits.java b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataVisits.java index 4ee7a0f3e4..121e66af48 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataVisits.java +++ b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataVisits.java @@ -621,6 +621,10 @@ public class StatisticsDataVisits extends StatisticsData { } if (dsoId != null && query.dsoType != -1) { + // Store the UUID of the DSO as an attribute. 
Needed in particular for Bitstream download usage reports, + // as the Bitstream itself won't be available when converting points to their REST representation + attrs.put("id", dsoId); + switch (query.dsoType) { case Constants.BITSTREAM: Bitstream bit = bitstreamService.findByIdOrLegacyId(context, dsoId); diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java index b5d65aa4e5..dcae4aa4cb 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java @@ -56,15 +56,12 @@ public class RetryFailedOpenUrlTrackerScriptConfiguration>>> map - = new HashMap<>(); + private final Set ipRanges = new HashSet<>(); + + /** + * Internal class representing an IP range + */ + static class IPRange { + + /* Lowest address in the range */ + private final long ipLo; + + /* Highest address in the range */ + private final long ipHi; + + IPRange(long ipLo, long ipHi) { + this.ipLo = ipLo; + this.ipHi = ipHi; + } + + /** + * Get the lowest address in the range + * @return the lowest address as a long integer + */ + public long getIpLo() { + return ipLo; + } + + /** + * Get the highest address in the range + * @return the highest address as a long integer + */ + public long getIpHi() { + return ipHi; + } + } /** * Can be full v4 IP, subnet or range string. 
@@ -45,155 +79,126 @@ public class IPTable { */ public void add(String ip) throws IPFormatException { - String[] start; + String start; - String[] end; + String end; String[] range = ip.split("-"); - if (range.length >= 2) { + if (range.length == 2) { - start = range[0].trim().split("/")[0].split("\\."); - end = range[1].trim().split("/")[0].split("\\."); + start = range[0].trim(); + end = range[1].trim(); - if (start.length != 4 || end.length != 4) { - throw new IPFormatException(ip + " - Ranges need to be full IPv4 Addresses"); - } - - if (!(start[0].equals(end[0]) && start[1].equals(end[1]) && start[2].equals(end[2]))) { - throw new IPFormatException(ip + " - Ranges can only be across the last subnet x.y.z.0 - x.y.z.254"); + try { + long ipLo = ipToLong(InetAddress.getByName(start)); + long ipHi = ipToLong(InetAddress.getByName(end)); + ipRanges.add(new IPRange(ipLo, ipHi)); + return; + } catch (UnknownHostException e) { + throw new IPFormatException(ip + " - Range format should be similar to 1.2.3.0-1.2.3.255"); } } else { - //need to ignore CIDR notation for the moment. 
- //ip = ip.split("\\/")[0]; - - String[] subnets = ip.split("\\."); - - if (subnets.length < 3) { - throw new IPFormatException(ip + " - require at least three subnet places (255.255.255.0"); + // Convert implicit ranges to netmask format + // 192 -> 192.0.0.0/8 + // 192.168 -> 192.168.0.0/16 + // 192.168.1 -> 192.168.1.0/24 + int periods = StringUtils.countMatches(ip, '.'); + if (periods < 3) { + ip = StringUtils.join(ip, StringUtils.repeat(".0", 4 - periods - 1), "/", (periods + 1) * 8); } - start = subnets; - end = subnets; - } - - if (start.length >= 3) { - Map>> first = map.get(start[0]); - - if (first == null) { - first = new HashMap<>(); - map.put(start[0], first); - } - - Map> second = first.get(start[1]); - - if (second == null) { - second = new HashMap<>(); - first.put(start[1], second); - } - - Set third = second.get(start[2]); - - if (third == null) { - third = new HashSet<>(); - second.put(start[2], third); - } - - //now populate fourth place (* or value 0-254); - - if (start.length == 3) { - third.add("*"); - } - - if (third.contains("*")) { - return; - } - - if (start.length >= 4) { - int s = Integer.valueOf(start[3]); - int e = Integer.valueOf(end[3]); - for (int i = s; i <= e; i++) { - third.add(String.valueOf(i)); + if (ip.contains("/")) { + String[] parts = ip.split("/"); + try { + long ipLong = ipToLong(InetAddress.getByName(parts[0])); + long mask = (long) Math.pow(2, 32 - Integer.parseInt(parts[1])); + long ipLo = (ipLong / mask) * mask; + long ipHi = (( (ipLong / mask) + 1) * mask) - 1; + ipRanges.add(new IPRange(ipLo, ipHi)); + return; + } catch (Exception e) { + throw new IPFormatException(ip + " - Range format should be similar to 172.16.0.0/12"); + } + } else { + try { + long ipLo = ipToLong(InetAddress.getByName(ip)); + ipRanges.add(new IPRange(ipLo, ipLo)); + return; + } catch (UnknownHostException e) { + throw new IPFormatException(ip + " - IP address format should be similar to 1.2.3.14"); } } } } + /** + * Convert an IP address to a 
long integer + * @param ip the IP address + * @return + */ + public static long ipToLong(InetAddress ip) { + byte[] octets = ip.getAddress(); + long result = 0; + for (byte octet : octets) { + result <<= 8; + result |= octet & 0xff; + } + return result; + } + + /** + * Convert a long integer into an IP address string + * @param ip the IP address as a long integer + * @return + */ + public static String longToIp(long ip) { + long part = ip; + String[] parts = new String[4]; + for (int i = 0; i < 4; i++) { + long octet = part & 0xff; + parts[3 - i] = String.valueOf(octet); + part = part / 256; + } + + return parts[0] + "." + parts[1] + "." + parts[2] + "." + parts[3]; + } + /** * Check whether a given address is contained in this netblock. * * @param ip the address to be tested * @return true if {@code ip} is within this table's limits. Returns false - * if {@link ip} looks like an IPv6 address. + * if {@code ip} looks like an IPv6 address. * @throws IPFormatException Exception Class to deal with IPFormat errors. */ public boolean contains(String ip) throws IPFormatException { - String[] subnets = ip.split("\\."); - - // Does it look like IPv6? - if (subnets.length > 4 || ip.contains("::")) { - log.warn("Address {} assumed not to match. IPv6 is not implemented.", ip); - return false; + try { + long ipToTest = ipToLong(InetAddress.getByName(ip)); + return ipRanges.stream() + .anyMatch(ipRange -> (ipToTest >= ipRange.getIpLo() && ipToTest <= ipRange.getIpHi())); + } catch (UnknownHostException e) { + throw new IPFormatException("ip not valid"); } - - // Does it look like a subnet? 
- if (subnets.length < 4) { - throw new IPFormatException("needs to be a single IP address"); - } - - Map>> first = map.get(subnets[0]); - - if (first == null) { - return false; - } - - Map> second = first.get(subnets[1]); - - if (second == null) { - return false; - } - - Set third = second.get(subnets[2]); - - if (third == null) { - return false; - } - - return third.contains(subnets[3]) || third.contains("*"); - } /** - * Convert to a Set. + * Convert to a Set. This set contains all IPs in the range * * @return this table's content as a Set */ public Set toSet() { HashSet set = new HashSet<>(); - for (Map.Entry>>> first : map.entrySet()) { - String firstString = first.getKey(); - Map>> secondMap = first.getValue(); - - for (Map.Entry>> second : secondMap.entrySet()) { - String secondString = second.getKey(); - Map> thirdMap = second.getValue(); - - for (Map.Entry> third : thirdMap.entrySet()) { - String thirdString = third.getKey(); - Set fourthSet = third.getValue(); - - if (fourthSet.contains("*")) { - set.add(firstString + "." + secondString + "." + thirdString); - } else { - for (String fourth : fourthSet) { - set.add(firstString + "." + secondString + "." + thirdString + "." 
+ fourth); - } - } - - } + Iterator ipRangeIterator = ipRanges.iterator(); + while (ipRangeIterator.hasNext()) { + IPRange ipRange = ipRangeIterator.next(); + long ipLo = ipRange.getIpLo(); + long ipHi = ipRange.getIpHi(); + for (long ip = ipLo; ip <= ipHi; ip++) { + set.add(longToIp(ip)); } } @@ -205,7 +210,7 @@ public class IPTable { * @return true if empty, false otherwise */ public boolean isEmpty() { - return map.isEmpty(); + return ipRanges.isEmpty(); } /** @@ -217,5 +222,23 @@ public class IPTable { } } - + /** + * Represent this IP table as a string + * @return a string containing all IP ranges in this IP table + */ + @Override + public String toString() { + StringBuilder stringBuilder = new StringBuilder(); + Iterator ipRangeIterator = ipRanges.iterator(); + while (ipRangeIterator.hasNext()) { + IPRange ipRange = ipRangeIterator.next(); + stringBuilder.append(longToIp(ipRange.getIpLo())) + .append("-") + .append(longToIp(ipRange.getIpHi())); + if (ipRangeIterator.hasNext()) { + stringBuilder.append(", "); + } + } + return stringBuilder.toString(); + } } diff --git a/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsClient.java b/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsClient.java index b1b31c0fe1..e45ce163ed 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsClient.java +++ b/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsClient.java @@ -16,6 +16,7 @@ import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Options; import org.apache.logging.log4j.Logger; +import org.apache.tools.ant.Project; import org.apache.tools.ant.taskdefs.Get; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.statistics.factory.StatisticsServiceFactory; @@ -136,6 +137,7 @@ public class StatisticsClient { URL url = new URL(value); Get get = new Get(); + get.setProject(new Project()); get.setDest(new File(spiders, 
url.getHost() + url.getPath().replace("/", "-"))); get.setSrc(url); get.setUseTimestamp(true); diff --git a/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsImporter.java b/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsImporter.java index bd8662854f..95736a8bd6 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsImporter.java +++ b/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsImporter.java @@ -348,9 +348,9 @@ public class StatisticsImporter { // Get the eperson details EPerson eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, user); - int epersonId = 0; + UUID epersonId = null; if (eperson != null) { - eperson.getID(); + epersonId = eperson.getID(); } // Save it in our server @@ -365,12 +365,10 @@ public class StatisticsImporter { sid.addField("city", city); sid.addField("latitude", latitude); sid.addField("longitude", longitude); - if (epersonId > 0) { + if (epersonId != null) { sid.addField("epersonid", epersonId); } - if (dns != null) { - sid.addField("dns", dns.toLowerCase()); - } + sid.addField("dns", dns.toLowerCase()); solrLoggerService.storeParents(sid, dso); solr.add(sid); diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java new file mode 100644 index 0000000000..209c1e21e7 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java @@ -0,0 +1,217 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.storage.bitstore; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.security.DigestInputStream; +import 
java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.Map; + +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.core.Utils; + +/** + * BaseBitStoreService base implementation to store + * and organize assets in digits. + * + */ +public abstract class BaseBitStoreService implements BitStoreService { + + protected static Logger log = LogManager.getLogger(DSBitStoreService.class); + // Checksum algorithm + protected static final String CSA = "MD5"; + protected static final String MODIFIED = "modified"; + protected static final String CHECKSUM_ALGORITHM = "checksum_algorithm"; + protected static final String CHECKSUM = "checksum"; + protected static final String SIZE_BYTES = "size_bytes"; + + protected boolean initialized = false; + + // These settings control the way an identifier is hashed into + // directory and file names + // + // With digitsPerLevel 2 and directoryLevels 3, an identifier + // like 12345678901234567890 turns into the relative name + // /12/34/56/12345678901234567890. + // + // You should not change these settings if you have data in the + // asset store, as the BitstreamStorageManager will be unable + // to find your existing data. + protected static final int digitsPerLevel = 2; + protected static final int directoryLevels = 3; + + /** + * Return the intermediate path derived from the internal_id. This method splits + * the id into groups which become subdirectories. 
+ * + * @param internalId The internal_id + * @return The path based on the id without leading or trailing separators + */ + protected String getIntermediatePath(String internalId) { + StringBuilder path = new StringBuilder(); + if (StringUtils.isEmpty(internalId) || internalId.length() <= digitsPerLevel) { + return path.append(internalId).append(File.separator).toString(); + } + populatePathSplittingId(internalId, path); + appendSeparator(path); + return path.toString(); + } + + /** + * Sanity Check: If the internal ID contains a pathname separator, it's probably + * an attempt to make a path traversal attack, so ignore the path prefix. The + * internal-ID is supposed to be just a filename, so this will not affect normal + * operation. + * + * @param sInternalId + * @return Sanitized id + */ + protected String sanitizeIdentifier(String sInternalId) { + if (sInternalId.contains(File.separator)) { + sInternalId = sInternalId.substring(sInternalId.lastIndexOf(File.separator) + 1); + } + return sInternalId; + } + + /** + * Append separator to target {@code StringBuilder} + * + * @param path + */ + protected void appendSeparator(StringBuilder path) { + if (!endsWithSeparator(path)) { + path.append(File.separator); + } + } + + /** + * Utility that checks string ending with separator + * + * @param bufFilename + * @return + */ + protected boolean endsWithSeparator(StringBuilder bufFilename) { + return bufFilename.lastIndexOf(File.separator) == bufFilename.length() - 1; + } + + /** + * Splits internalId into several subpaths using {@code digitsPerLevel} that + * indicates the folder name length, and {@code direcoryLevels} that indicates + * the maximum number of subfolders. 
+ * + * @param internalId bitStream identifier + * @param path + */ + protected void populatePathSplittingId(String internalId, StringBuilder path) { + int digits = 0; + path.append(extractSubstringFrom(internalId, digits, digits + digitsPerLevel)); + for (int i = 1; i < directoryLevels && !isLonger(internalId, digits + digitsPerLevel); i++) { + digits = i * digitsPerLevel; + path.append(File.separator); + path.append(extractSubstringFrom(internalId, digits, digits + digitsPerLevel)); + } + } + + /** + * Extract substring if is in range, otherwise will truncate to length + * + * @param internalId + * @param startIndex + * @param endIndex + * @return + */ + protected String extractSubstringFrom(String internalId, int startIndex, int endIndex) { + if (isLonger(internalId, endIndex)) { + endIndex = internalId.length(); + } + return internalId.substring(startIndex, endIndex); + } + + /** + * Checks if the {@code String} is longer than {@code endIndex} + * + * @param internalId + * @param endIndex + * @return + */ + protected boolean isLonger(String internalId, int endIndex) { + return endIndex > internalId.length(); + } + + /** + * Retrieves a map of useful metadata about the File (size, checksum, modified) + * + * @param file The File to analyze + * @param attrs The map where we are storing values + * @return Map of updated metadatas / attrs + * @throws IOException + */ + public Map about(File file, Map attrs) throws IOException { + try { + if (file != null && file.exists()) { + this.putValueIfExistsKey(attrs, SIZE_BYTES, file.length()); + if (attrs.containsKey(CHECKSUM)) { + attrs.put(CHECKSUM, Utils.toHex(this.generateChecksumFrom(file))); + attrs.put(CHECKSUM_ALGORITHM, CSA); + } + this.putValueIfExistsKey(attrs, MODIFIED, String.valueOf(file.lastModified())); + return attrs; + } + return null; + } catch (Exception e) { + log.error("about( FilePath: " + file.getAbsolutePath() + ", Map: " + attrs.toString() + ")", e); + throw new IOException(e); + } + } + + 
@Override + public boolean isInitialized() { + return this.initialized; + } + + private byte[] generateChecksumFrom(File file) throws FileNotFoundException, IOException { + // generate checksum by reading the bytes + try (FileInputStream fis = new FileInputStream(file)) { + return generateChecksumFrom(fis); + } catch (NoSuchAlgorithmException e) { + log.warn("Caught NoSuchAlgorithmException", e); + throw new IOException("Invalid checksum algorithm"); + } + } + + private byte[] generateChecksumFrom(FileInputStream fis) throws IOException, NoSuchAlgorithmException { + try (DigestInputStream dis = new DigestInputStream(fis, MessageDigest.getInstance(CSA))) { + final int BUFFER_SIZE = 1024 * 4; + final byte[] buffer = new byte[BUFFER_SIZE]; + while (true) { + final int count = dis.read(buffer, 0, BUFFER_SIZE); + if (count == -1) { + break; + } + } + return dis.getMessageDigest().digest(); + } + } + + protected void putValueIfExistsKey(Map attrs, String key, Object value) { + this.putEntryIfExistsKey(attrs, key, Map.entry(key, value)); + } + + protected void putEntryIfExistsKey(Map attrs, String key, Map.Entry entry) { + if (attrs.containsKey(key)) { + attrs.put(entry.getKey(), entry.getValue()); + } + } + +} diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java index b33867f0e2..b6ac540c50 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java @@ -77,4 +77,20 @@ public interface BitStoreService { * @throws java.io.IOException If a problem occurs while removing the asset */ public void remove(Bitstream bitstream) throws IOException; + + /** + * Determines if a store has been initialized + * + * @return {@code boolean} true if initialized, false otherwise + */ + public boolean isInitialized(); + + /** + * Determines if a store is enabled, by default is 
enabled + * + * @return {@code boolean} true if enabled, false otherwise + */ + public default boolean isEnabled() { + return true; + } } diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java index 8bf5d3cbd3..0bd71088da 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java @@ -18,6 +18,7 @@ import java.util.UUID; import javax.annotation.Nullable; import org.apache.commons.collections4.MapUtils; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.checker.service.ChecksumHistoryService; @@ -57,13 +58,12 @@ import org.springframework.beans.factory.annotation.Autowired; * be notified of BitstreamStorageManager actions.

    * * @author Peter Breton, Robert Tansley, David Little, Nathan Sarr - * @version $Revision$ */ public class BitstreamStorageServiceImpl implements BitstreamStorageService, InitializingBean { /** * log4j log */ - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(BitstreamStorageServiceImpl.class); + private static final Logger log = LogManager.getLogger(); @Autowired(required = true) protected BitstreamService bitstreamService; @@ -73,7 +73,7 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini /** * asset stores */ - private Map stores = new HashMap(); + private Map stores = new HashMap<>(); /** * The index of the asset store to use for new bitstreams @@ -92,7 +92,9 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini @Override public void afterPropertiesSet() throws Exception { for (Map.Entry storeEntry : stores.entrySet()) { - storeEntry.getValue().init(); + if (storeEntry.getValue().isEnabled() && !storeEntry.getValue().isInitialized()) { + storeEntry.getValue().init(); + } } } @@ -100,19 +102,18 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini public UUID store(Context context, Bitstream bitstream, InputStream is) throws SQLException, IOException { // Create internal ID String id = Utils.generateKey(); - - bitstream.setDeleted(true); - bitstream.setInternalId(id); - /* * Set the store number of the new bitstream If you want to use some * other method of working out where to put a new bitstream, here's * where it should go */ bitstream.setStoreNumber(incoming); + bitstream.setDeleted(true); + bitstream.setInternalId(id); + BitStoreService store = this.getStore(incoming); //For efficiencies sake, PUT is responsible for setting bitstream size_bytes, checksum, and checksum_algorithm - stores.get(incoming).put(bitstream, is); + store.put(bitstream, is); //bitstream.setSizeBytes(file.length()); 
//bitstream.setChecksum(Utils.toHex(dis.getMessageDigest().digest())); //bitstream.setChecksumAlgorithm("MD5"); @@ -169,7 +170,7 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini wantedMetadata.put("checksum", null); wantedMetadata.put("checksum_algorithm", null); - Map receivedMetadata = stores.get(assetstore).about(bitstream, wantedMetadata); + Map receivedMetadata = this.getStore(assetstore).about(bitstream, wantedMetadata); if (MapUtils.isEmpty(receivedMetadata)) { String message = "Not able to register bitstream:" + bitstream.getID() + " at path: " + bitstreamPath; log.error(message); @@ -204,7 +205,7 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini wantedMetadata.put("checksum", null); wantedMetadata.put("checksum_algorithm", null); - Map receivedMetadata = stores.get(bitstream.getStoreNumber()).about(bitstream, wantedMetadata); + Map receivedMetadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, wantedMetadata); return receivedMetadata; } @@ -217,16 +218,15 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini public InputStream retrieve(Context context, Bitstream bitstream) throws SQLException, IOException { Integer storeNumber = bitstream.getStoreNumber(); - return stores.get(storeNumber).get(bitstream); + return this.getStore(storeNumber).get(bitstream); } @Override public void cleanup(boolean deleteDbRecords, boolean verbose) throws SQLException, IOException, AuthorizeException { - Context context = null; + Context context = new Context(Context.Mode.BATCH_EDIT); int commitCounter = 0; try { - context = new Context(Context.Mode.BATCH_EDIT); context.turnOffAuthorisationSystem(); List storage = bitstreamService.findDeletedBitstreams(context); @@ -235,7 +235,7 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini Map wantedMetadata = new HashMap(); wantedMetadata.put("size_bytes", null); 
wantedMetadata.put("modified", null); - Map receivedMetadata = stores.get(bitstream.getStoreNumber()).about(bitstream, wantedMetadata); + Map receivedMetadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, wantedMetadata); // Make sure entries which do not exist are removed @@ -285,7 +285,7 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini // Since versioning allows for multiple bitstreams, check if the internal identifier isn't used on // another place if (bitstreamService.findDuplicateInternalIdentifier(context, bitstream).isEmpty()) { - stores.get(bitstream.getStoreNumber()).remove(bitstream); + this.getStore(bitstream.getStoreNumber()).remove(bitstream); String message = ("Deleted bitstreamID " + bid + ", internalID " + bitstream.getInternalId()); if (log.isDebugEnabled()) { @@ -321,9 +321,7 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini context.abort(); throw sqle; } finally { - if (context != null) { - context.restoreAuthSystemState(); - } + context.restoreAuthSystemState(); } } @@ -332,7 +330,7 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini public Long getLastModified(Bitstream bitstream) throws IOException { Map attrs = new HashMap(); attrs.put("modified", null); - attrs = stores.get(bitstream.getStoreNumber()).about(bitstream, attrs); + attrs = this.getStore(bitstream.getStoreNumber()).about(bitstream, attrs); if (attrs == null || !attrs.containsKey("modified")) { return null; } @@ -386,11 +384,12 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini * @throws AuthorizeException Exception indicating the current user of the context does not have permission * to perform a particular action. 
*/ + @Override public void migrate(Context context, Integer assetstoreSource, Integer assetstoreDestination, boolean deleteOld, Integer batchCommitSize) throws IOException, SQLException, AuthorizeException { //Find all the bitstreams on the old source, copy it to new destination, update store_number, save, remove old Iterator allBitstreamsInSource = bitstreamService.findByStoreNumber(context, assetstoreSource); - Integer processedCounter = 0; + int processedCounter = 0; while (allBitstreamsInSource.hasNext()) { Bitstream bitstream = allBitstreamsInSource.next(); @@ -400,13 +399,13 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini .getName() + ", SizeBytes:" + bitstream.getSizeBytes()); InputStream inputStream = retrieve(context, bitstream); - stores.get(assetstoreDestination).put(bitstream, inputStream); + this.getStore(assetstoreDestination).put(bitstream, inputStream); bitstream.setStoreNumber(assetstoreDestination); bitstreamService.update(context, bitstream); if (deleteOld) { log.info("Removing bitstream:" + bitstream.getID() + " from assetstore[" + assetstoreSource + "]"); - stores.get(assetstoreSource).remove(bitstream); + this.getStore(assetstoreSource).remove(bitstream); } processedCounter++; @@ -424,14 +423,18 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini "] completed. 
" + processedCounter + " objects were transferred."); } + @Override public void printStores(Context context) { try { for (Integer storeNumber : stores.keySet()) { long countBitstreams = bitstreamService.countByStoreNumber(context, storeNumber); - System.out.println("store[" + storeNumber + "] == " + stores.get(storeNumber).getClass() - .getSimpleName() + ", which has " + - countBitstreams + " bitstreams."); + BitStoreService store = this.stores.get(storeNumber); + System.out.println( + "store[" + storeNumber + "] == " + store.getClass().getSimpleName() + + ", which has initialized-status: " + store.isInitialized() + + ", and has: " + countBitstreams + " bitstreams." + ); } System.out.println("Incoming assetstore is store[" + incoming + "]"); } catch (SQLException e) { @@ -475,4 +478,13 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini // Less than one hour old return (now - lastModified) < (1 * 60 * 1000); } + + protected BitStoreService getStore(int position) throws IOException { + BitStoreService bitStoreService = this.stores.get(position); + if (!bitStoreService.isInitialized()) { + bitStoreService.init(); + } + return bitStoreService; + } + } diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java index 36f75c67f9..1fdf1e84e1 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java @@ -29,33 +29,17 @@ import org.dspace.core.Utils; * @author Peter Breton, Robert Tansley, Richard Rodgers, Peter Dietz */ -public class DSBitStoreService implements BitStoreService { +public class DSBitStoreService extends BaseBitStoreService { /** * log4j log */ private static Logger log = org.apache.logging.log4j.LogManager.getLogger(DSBitStoreService.class); - // These settings control the way an identifier is hashed into - // 
directory and file names - // - // With digitsPerLevel 2 and directoryLevels 3, an identifier - // like 12345678901234567890 turns into the relative name - // /12/34/56/12345678901234567890. - // - // You should not change these settings if you have data in the - // asset store, as the BitstreamStorageManager will be unable - // to find your existing data. - private static final int digitsPerLevel = 2; - - private static final int directoryLevels = 3; - - // Checksum algorithm - private static final String CSA = "MD5"; - /** * the asset directory */ private File baseDir; + protected final String REGISTERED_FLAG = "-R"; public DSBitStoreService() { } @@ -66,6 +50,7 @@ public class DSBitStoreService implements BitStoreService { public void init() { // the config string contains just the asset store directory path //set baseDir? + this.initialized = true; } /** @@ -152,35 +137,7 @@ public class DSBitStoreService implements BitStoreService { // potentially expensive, since it may calculate the checksum File file = getFile(bitstream); if (file != null && file.exists()) { - if (attrs.containsKey("size_bytes")) { - attrs.put("size_bytes", file.length()); - } - if (attrs.containsKey("checksum")) { - // generate checksum by reading the bytes - DigestInputStream dis = null; - try { - FileInputStream fis = new FileInputStream(file); - dis = new DigestInputStream(fis, MessageDigest.getInstance(CSA)); - } catch (NoSuchAlgorithmException e) { - log.warn("Caught NoSuchAlgorithmException", e); - throw new IOException("Invalid checksum algorithm"); - } - final int BUFFER_SIZE = 1024 * 4; - final byte[] buffer = new byte[BUFFER_SIZE]; - while (true) { - final int count = dis.read(buffer, 0, BUFFER_SIZE); - if (count == -1) { - break; - } - } - attrs.put("checksum", Utils.toHex(dis.getMessageDigest().digest())); - attrs.put("checksum_algorithm", CSA); - dis.close(); - } - if (attrs.containsKey("modified")) { - attrs.put("modified", String.valueOf(file.lastModified())); - } - return 
attrs; + return super.about(file, attrs); } return null; } catch (Exception e) { @@ -278,10 +235,7 @@ public class DSBitStoreService implements BitStoreService { // make a path traversal attack, so ignore the path // prefix. The internal-ID is supposed to be just a // filename, so this will not affect normal operation. - if (sInternalId.contains(File.separator)) { - sInternalId = sInternalId.substring(sInternalId.lastIndexOf(File.separator) + 1); - } - + sInternalId = this.sanitizeIdentifier(sInternalId); sIntermediatePath = getIntermediatePath(sInternalId); } @@ -297,29 +251,6 @@ public class DSBitStoreService implements BitStoreService { return new File(bufFilename.toString()); } - /** - * Return the intermediate path derived from the internal_id. This method - * splits the id into groups which become subdirectories. - * - * @param iInternalId The internal_id - * @return The path based on the id without leading or trailing separators - */ - protected String getIntermediatePath(String iInternalId) { - StringBuilder buf = new StringBuilder(); - for (int i = 0; i < directoryLevels; i++) { - int digits = i * digitsPerLevel; - if (i > 0) { - buf.append(File.separator); - } - buf.append(iInternalId.substring(digits, digits - + digitsPerLevel)); - } - buf.append(File.separator); - return buf.toString(); - } - - protected final String REGISTERED_FLAG = "-R"; - public boolean isRegisteredBitstream(String internalId) { return internalId.startsWith(REGISTERED_FLAG); } diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java index ce2b3b3f05..2bad0ac012 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java @@ -11,20 +11,31 @@ import java.io.File; import java.io.IOException; import java.io.InputStream; import java.util.Map; +import 
java.util.function.Supplier; +import javax.validation.constraints.NotNull; import com.amazonaws.AmazonClientException; import com.amazonaws.auth.AWSCredentials; +import com.amazonaws.auth.AWSStaticCredentialsProvider; import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.regions.Region; import com.amazonaws.regions.Regions; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3Client; +import com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.amazonaws.services.s3.model.AmazonS3Exception; import com.amazonaws.services.s3.model.GetObjectRequest; import com.amazonaws.services.s3.model.ObjectMetadata; -import com.amazonaws.services.s3.model.PutObjectRequest; -import com.amazonaws.services.s3.model.PutObjectResult; import com.amazonaws.services.s3.model.S3Object; +import com.amazonaws.services.s3.transfer.TransferManager; +import com.amazonaws.services.s3.transfer.TransferManagerBuilder; +import com.amazonaws.services.s3.transfer.Upload; +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.DefaultParser; +import org.apache.commons.cli.HelpFormatter; +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; import org.apache.http.HttpStatus; @@ -34,6 +45,9 @@ import org.dspace.content.Bitstream; import org.dspace.core.Utils; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.storage.bitstore.factory.StorageServiceFactory; +import org.dspace.storage.bitstore.service.BitstreamStorageService; +import org.dspace.util.FunctionalUtils; import org.springframework.beans.factory.annotation.Autowired; /** @@ -42,9 +56,14 @@ import org.springframework.beans.factory.annotation.Autowired; * NB: you must have obtained an account with Amazon to use this store * * @author Richard Rodgers, 
Peter Dietz + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + * */ -public class S3BitStoreService implements BitStoreService { +public class S3BitStoreService extends BaseBitStoreService { + protected static final String DEFAULT_BUCKET_PREFIX = "dspace-asset-"; + // Prefix indicating a registered bitstream + protected final String REGISTERED_FLAG = "-R"; /** * log4j log */ @@ -55,9 +74,25 @@ public class S3BitStoreService implements BitStoreService { */ private static final String CSA = "MD5"; + // These settings control the way an identifier is hashed into + // directory and file names + // + // With digitsPerLevel 2 and directoryLevels 3, an identifier + // like 12345678901234567890 turns into the relative name + // /12/34/56/12345678901234567890. + // + // You should not change these settings if you have data in the + // asset store, as the BitstreamStorageManager will be unable + // to find your existing data. + protected static final int digitsPerLevel = 2; + protected static final int directoryLevels = 3; + + private boolean enabled = false; + private String awsAccessKey; private String awsSecretKey; private String awsRegionName; + private boolean useRelativePath; /** * container for all the assets @@ -74,9 +109,48 @@ public class S3BitStoreService implements BitStoreService { */ private AmazonS3 s3Service = null; + /** + * S3 transfer manager + * this is reused between put calls to use less resources for multiple uploads + */ + private TransferManager tm = null; + private static final ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); - public S3BitStoreService() { + + /** + * Utility method for generate AmazonS3 builder + * + * @param regions wanted regions in client + * @param awsCredentials credentials of the client + * @return builder with the specified parameters + */ + protected static Supplier amazonClientBuilderBy( + @NotNull Regions regions, + @NotNull 
AWSCredentials awsCredentials + ) { + return () -> AmazonS3ClientBuilder.standard() + .withCredentials(new AWSStaticCredentialsProvider(awsCredentials)) + .withRegion(regions) + .build(); + } + + public S3BitStoreService() {} + + /** + * This constructor is used for test purpose. + * In this way is possible to use a mocked instance of AmazonS3 + * + * @param s3Service mocked AmazonS3 service + */ + protected S3BitStoreService(AmazonS3 s3Service, TransferManager tm) { + this.s3Service = s3Service; + this.tm = tm; + } + + @Override + public boolean isEnabled() { + return this.enabled; } /** @@ -88,47 +162,70 @@ public class S3BitStoreService implements BitStoreService { */ @Override public void init() throws IOException { - if (StringUtils.isBlank(getAwsAccessKey()) || StringUtils.isBlank(getAwsSecretKey())) { - log.warn("Empty S3 access or secret"); - } - // init client - AWSCredentials awsCredentials = new BasicAWSCredentials(getAwsAccessKey(), getAwsSecretKey()); - s3Service = new AmazonS3Client(awsCredentials); - - // bucket name - if (StringUtils.isEmpty(bucketName)) { - // get hostname of DSpace UI to use to name bucket - String hostname = Utils.getHostName(configurationService.getProperty("dspace.ui.url")); - bucketName = "dspace-asset-" + hostname; - log.warn("S3 BucketName is not configured, setting default: " + bucketName); + if (this.isInitialized()) { + return; } try { - if (!s3Service.doesBucketExist(bucketName)) { - s3Service.createBucket(bucketName); - log.info("Creating new S3 Bucket: " + bucketName); + if (StringUtils.isNotBlank(getAwsAccessKey()) && StringUtils.isNotBlank(getAwsSecretKey())) { + log.warn("Use local defined S3 credentials"); + // region + Regions regions = Regions.DEFAULT_REGION; + if (StringUtils.isNotBlank(awsRegionName)) { + try { + regions = Regions.fromName(awsRegionName); + } catch (IllegalArgumentException e) { + log.warn("Invalid aws_region: " + awsRegionName); + } + } + // init client + s3Service = 
FunctionalUtils.getDefaultOrBuild( + this.s3Service, + amazonClientBuilderBy( + regions, + new BasicAWSCredentials(getAwsAccessKey(), getAwsSecretKey()) + ) + ); + log.warn("S3 Region set to: " + regions.getName()); + } else { + log.info("Using a IAM role or aws environment credentials"); + s3Service = FunctionalUtils.getDefaultOrBuild( + this.s3Service, + AmazonS3ClientBuilder::defaultClient + ); } - } catch (AmazonClientException e) { - log.error(e); - throw new IOException(e); - } - // region - if (StringUtils.isNotBlank(awsRegionName)) { - try { - Regions regions = Regions.fromName(awsRegionName); - Region region = Region.getRegion(regions); - s3Service.setRegion(region); - log.info("S3 Region set to: " + region.getName()); - } catch (IllegalArgumentException e) { - log.warn("Invalid aws_region: " + awsRegionName); + // bucket name + if (StringUtils.isEmpty(bucketName)) { + // get hostname of DSpace UI to use to name bucket + String hostname = Utils.getHostName(configurationService.getProperty("dspace.ui.url")); + bucketName = DEFAULT_BUCKET_PREFIX + hostname; + log.warn("S3 BucketName is not configured, setting default: " + bucketName); } + + try { + if (!s3Service.doesBucketExist(bucketName)) { + s3Service.createBucket(bucketName); + log.info("Creating new S3 Bucket: " + bucketName); + } + } catch (AmazonClientException e) { + throw new IOException(e); + } + this.initialized = true; + log.info("AWS S3 Assetstore ready to go! bucket:" + bucketName); + } catch (Exception e) { + this.initialized = false; + log.error("Can't initialize this store!", e); } log.info("AWS S3 Assetstore ready to go! 
bucket:" + bucketName); - } + tm = FunctionalUtils.getDefaultOrBuild(tm, () -> TransferManagerBuilder.standard() + .withAlwaysCalculateMultipartMd5(true) + .withS3Client(s3Service) + .build()); + } /** * Return an identifier unique to this asset store instance @@ -151,6 +248,10 @@ public class S3BitStoreService implements BitStoreService { @Override public InputStream get(Bitstream bitstream) throws IOException { String key = getFullKey(bitstream.getInternalId()); + // Strip -R from bitstream key if it's registered + if (isRegisteredBitstream(key)) { + key = key.substring(REGISTERED_FLAG.length()); + } try { S3Object object = s3Service.getObject(new GetObjectRequest(bucketName, key)); return (object != null) ? object.getObjectContent() : null; @@ -179,22 +280,24 @@ public class S3BitStoreService implements BitStoreService { try { FileUtils.copyInputStreamToFile(in, scratchFile); long contentLength = scratchFile.length(); + // The ETag may or may not be and MD5 digest of the object data. 
+ // Therefore, we precalculate before uploading + String localChecksum = org.dspace.curate.Utils.checksum(scratchFile, CSA); - PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, key, scratchFile); - PutObjectResult putObjectResult = s3Service.putObject(putObjectRequest); + Upload upload = tm.upload(bucketName, key, scratchFile); + + upload.waitForUploadResult(); bitstream.setSizeBytes(contentLength); - bitstream.setChecksum(putObjectResult.getETag()); + bitstream.setChecksum(localChecksum); bitstream.setChecksumAlgorithm(CSA); - scratchFile.delete(); - - } catch (AmazonClientException | IOException e) { + } catch (AmazonClientException | IOException | InterruptedException e) { log.error("put(" + bitstream.getInternalId() + ", is)", e); throw new IOException(e); } finally { - if (scratchFile.exists()) { - scratchFile.delete(); + if (!scratchFile.delete()) { + scratchFile.deleteOnExit(); } } } @@ -215,21 +318,14 @@ public class S3BitStoreService implements BitStoreService { @Override public Map about(Bitstream bitstream, Map attrs) throws IOException { String key = getFullKey(bitstream.getInternalId()); + // If this is a registered bitstream, strip the -R prefix before retrieving + if (isRegisteredBitstream(key)) { + key = key.substring(REGISTERED_FLAG.length()); + } try { ObjectMetadata objectMetadata = s3Service.getObjectMetadata(bucketName, key); - if (objectMetadata != null) { - if (attrs.containsKey("size_bytes")) { - attrs.put("size_bytes", objectMetadata.getContentLength()); - } - if (attrs.containsKey("checksum")) { - attrs.put("checksum", objectMetadata.getETag()); - attrs.put("checksum_algorithm", CSA); - } - if (attrs.containsKey("modified")) { - attrs.put("modified", String.valueOf(objectMetadata.getLastModified().getTime())); - } - return attrs; + return this.about(objectMetadata, attrs); } } catch (AmazonS3Exception e) { if (e.getStatusCode() == HttpStatus.SC_NOT_FOUND) { @@ -242,6 +338,34 @@ public class S3BitStoreService implements 
BitStoreService { return null; } + /** + * Populates map values by checking key existence + *
    + * Adds technical metadata about an asset in the asset store, like: + *
      + *
    • size_bytes
    • + *
    • checksum
    • + *
    • checksum_algorithm
    • + *
    • modified
    • + *
    + * + * @param objectMetadata containing technical data + * @param attrs map with keys populated + * @return Map of enriched attrs with values + */ + public Map about(ObjectMetadata objectMetadata, Map attrs) { + if (objectMetadata != null) { + this.putValueIfExistsKey(attrs, SIZE_BYTES, objectMetadata.getContentLength()); + + // put CHECKSUM_ALGORITHM if exists CHECKSUM + this.putValueIfExistsKey(attrs, CHECKSUM, objectMetadata.getETag()); + this.putEntryIfExistsKey(attrs, CHECKSUM, Map.entry(CHECKSUM_ALGORITHM, CSA)); + + this.putValueIfExistsKey(attrs, MODIFIED, String.valueOf(objectMetadata.getLastModified().getTime())); + } + return attrs; + } + /** * Remove an asset from the asset store. An irreversible operation. * @@ -266,11 +390,53 @@ public class S3BitStoreService implements BitStoreService { * @return full key prefixed with a subfolder, if applicable */ public String getFullKey(String id) { + StringBuilder bufFilename = new StringBuilder(); if (StringUtils.isNotEmpty(subfolder)) { - return subfolder + "/" + id; - } else { - return id; + bufFilename.append(subfolder); + appendSeparator(bufFilename); } + + if (this.useRelativePath) { + bufFilename.append(getRelativePath(id)); + } else { + bufFilename.append(id); + } + + if (log.isDebugEnabled()) { + log.debug("S3 filepath for " + id + " is " + + bufFilename.toString()); + } + + return bufFilename.toString(); + } + + /** + * there are 2 cases: + * - conventional bitstream, conventional storage + * - registered bitstream, conventional storage + * conventional bitstream: dspace ingested, dspace random name/path + * registered bitstream: registered to dspace, any name/path + * + * @param sInternalId + * @return Computed Relative path + */ + public String getRelativePath(String sInternalId) { + BitstreamStorageService bitstreamStorageService = StorageServiceFactory.getInstance() + .getBitstreamStorageService(); + + String sIntermediatePath = StringUtils.EMPTY; + if 
(bitstreamStorageService.isRegisteredBitstream(sInternalId)) { + sInternalId = sInternalId.substring(REGISTERED_FLAG.length()); + } else { + sInternalId = sanitizeIdentifier(sInternalId); + sIntermediatePath = getIntermediatePath(sInternalId); + } + + return sIntermediatePath + sInternalId; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; } public String getAwsAccessKey() { @@ -316,6 +482,14 @@ public class S3BitStoreService implements BitStoreService { this.subfolder = subfolder; } + public boolean isUseRelativePath() { + return useRelativePath; + } + + public void setUseRelativePath(boolean useRelativePath) { + this.useRelativePath = useRelativePath; + } + /** * Contains a command-line testing tool. Expects arguments: * -a accessKey -s secretKey -f assetFileName @@ -324,27 +498,37 @@ public class S3BitStoreService implements BitStoreService { * @throws Exception generic exception */ public static void main(String[] args) throws Exception { - //TODO use proper CLI, or refactor to be a unit test. Can't mock this without keys though. + //TODO Perhaps refactor to be a unit test. Can't mock this without keys though. 
// parse command line - String assetFile = null; - String accessKey = null; - String secretKey = null; + Options options = new Options(); + Option option; - for (int i = 0; i < args.length; i += 2) { - if (args[i].startsWith("-a")) { - accessKey = args[i + 1]; - } else if (args[i].startsWith("-s")) { - secretKey = args[i + 1]; - } else if (args[i].startsWith("-f")) { - assetFile = args[i + 1]; - } - } + option = Option.builder("a").desc("access key").hasArg().required().build(); + options.addOption(option); - if (accessKey == null || secretKey == null || assetFile == null) { - System.out.println("Missing arguments - exiting"); + option = Option.builder("s").desc("secret key").hasArg().required().build(); + options.addOption(option); + + option = Option.builder("f").desc("asset file name").hasArg().required().build(); + options.addOption(option); + + DefaultParser parser = new DefaultParser(); + + CommandLine command; + try { + command = parser.parse(options, args); + } catch (ParseException e) { + System.err.println(e.getMessage()); + new HelpFormatter().printHelp( + S3BitStoreService.class.getSimpleName() + "options", options); return; } + + String accessKey = command.getOptionValue("a"); + String secretKey = command.getOptionValue("s"); + String assetFile = command.getOptionValue("f"); + S3BitStoreService store = new S3BitStoreService(); AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey); @@ -358,9 +542,9 @@ public class S3BitStoreService implements BitStoreService { // get hostname of DSpace UI to use to name bucket String hostname = Utils.getHostName(configurationService.getProperty("dspace.ui.url")); //Bucketname should be lowercase - store.bucketName = "dspace-asset-" + hostname + ".s3test"; + store.bucketName = DEFAULT_BUCKET_PREFIX + hostname + ".s3test"; store.s3Service.createBucket(store.bucketName); -/* Broken in DSpace 6 TODO Refactor + /* Broken in DSpace 6 TODO Refactor // time everything, todo, swtich to caliper long start 
= System.currentTimeMillis(); // Case 1: store a file @@ -413,4 +597,14 @@ public class S3BitStoreService implements BitStoreService { store.get(id); */ } + + /** + * Is this a registered bitstream? (not stored via this service originally) + * @param internalId + * @return + */ + public boolean isRegisteredBitstream(String internalId) { + return internalId.startsWith(REGISTERED_FLAG); + } + } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java index 8835e03104..1464fb44ec 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java @@ -26,6 +26,7 @@ import java.util.regex.Pattern; import javax.sql.DataSource; import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.core.Context; @@ -37,6 +38,7 @@ import org.dspace.workflow.factory.WorkflowServiceFactory; import org.flywaydb.core.Flyway; import org.flywaydb.core.api.FlywayException; import org.flywaydb.core.api.MigrationInfo; +import org.flywaydb.core.api.MigrationVersion; import org.flywaydb.core.api.callback.Callback; import org.flywaydb.core.api.configuration.FluentConfiguration; import org.flywaydb.core.internal.info.MigrationInfoDumper; @@ -93,7 +95,7 @@ public class DatabaseUtils { // Usage checks if (argv.length < 1) { System.out.println("\nDatabase action argument is missing."); - System.out.println("Valid actions: 'test', 'info', 'migrate', 'repair', 'validate', " + + System.out.println("Valid actions: 'test', 'info', 'migrate', 'repair', 'skip', 'validate', " + "'update-sequences' or 'clean'"); System.out.println("\nOr, type 'database help' for more information.\n"); System.exit(1); @@ -111,280 +113,339 @@ public class DatabaseUtils { // *before* any other Flyway commands can be run. 
This is a safety check. FlywayUpgradeUtils.upgradeFlywayTable(flyway, dataSource.getConnection()); - // "test" = Test Database Connection - if (argv[0].equalsIgnoreCase("test")) { - // Try to connect to the database - System.out.println("\nAttempting to connect to database"); - try (Connection connection = dataSource.getConnection()) { - System.out.println("Connected successfully!"); + // Determine action param passed to "./dspace database" + switch (argv[0].toLowerCase(Locale.ENGLISH)) { + // "test" = Test Database Connection + case "test": + // Try to connect to the database + System.out.println("\nAttempting to connect to database"); + try (Connection connection = dataSource.getConnection()) { + System.out.println("Connected successfully!"); - // Print basic database connection information - printDBInfo(connection); + // Print basic database connection information + printDBInfo(connection); - // Print any database warnings/errors found (if any) - boolean issueFound = printDBIssues(connection); + // Print any database warnings/errors found (if any) + boolean issueFound = printDBIssues(connection); - // If issues found, exit with an error status (even if connection succeeded). 
- if (issueFound) { - System.exit(1); - } else { - System.exit(0); - } - } catch (SQLException sqle) { - System.err.println("\nError running 'test': "); - System.err.println(" - " + sqle); - System.err.println("\nPlease see the DSpace documentation for assistance.\n"); - sqle.printStackTrace(System.err); - System.exit(1); - } - } else if (argv[0].equalsIgnoreCase("info") || argv[0].equalsIgnoreCase("status")) { - try (Connection connection = dataSource.getConnection()) { - // Print basic Database info - printDBInfo(connection); - - // Get info table from Flyway - System.out.println("\n" + MigrationInfoDumper.dumpToAsciiTable(flyway.info().all())); - - // If Flyway is NOT yet initialized, also print the determined version information - // NOTE: search is case sensitive, as flyway table name is ALWAYS lowercase, - // See: http://flywaydb.org/documentation/faq.html#case-sensitive - if (!tableExists(connection, flyway.getConfiguration().getTable(), true)) { - System.out - .println("\nNOTE: This database is NOT yet initialized for auto-migrations (via Flyway)."); - // Determine which version of DSpace this looks like - String dbVersion = determineDBVersion(connection); - if (dbVersion != null) { - System.out - .println("\nYour database looks to be compatible with DSpace version " + dbVersion); - System.out.println( - "All upgrades *after* version " + dbVersion + " will be run during the next migration" + - "."); - System.out.println("\nIf you'd like to upgrade now, simply run 'dspace database migrate'."); - } - } - - // Print any database warnings/errors found (if any) - boolean issueFound = printDBIssues(connection); - - // If issues found, exit with an error status - if (issueFound) { - System.exit(1); - } else { - System.exit(0); - } - } catch (SQLException e) { - System.err.println("Info exception:"); - e.printStackTrace(System.err); - System.exit(1); - } - } else if (argv[0].equalsIgnoreCase("migrate")) { - try (Connection connection = dataSource.getConnection()) { 
- System.out.println("\nDatabase URL: " + connection.getMetaData().getURL()); - - // "migrate" allows for an OPTIONAL second argument (only one may be specified): - // - "ignored" = Also run any previously "ignored" migrations during the migration - // - "force" = Even if no pending migrations exist, still run a migration to trigger callbacks. - // - [version] = ONLY run migrations up to a specific DSpace version (ONLY FOR TESTING) - if (argv.length == 2) { - if (argv[1].equalsIgnoreCase("ignored")) { - System.out.println( - "Migrating database to latest version AND running previously \"Ignored\" " + - "migrations... (Check logs for details)"); - // Update the database to latest version, but set "outOfOrder=true" - // This will ensure any old migrations in the "ignored" state are now run - updateDatabase(dataSource, connection, null, true); - } else if (argv[1].equalsIgnoreCase("force")) { - updateDatabase(dataSource, connection, null, false, true); + // If issues found, exit with an error status (even if connection succeeded). + if (issueFound) { + System.exit(1); } else { - // Otherwise, we assume "argv[1]" is a valid migration version number - // This is only for testing! Never specify for Production! 
+ System.exit(0); + } + } catch (SQLException sqle) { + System.err.println("\nError running 'test': "); + System.err.println(" - " + sqle); + System.err.println("\nPlease see the DSpace documentation for assistance.\n"); + sqle.printStackTrace(System.err); + System.exit(1); + } + break; + // "info" and "status" are identical and provide database info + case "info": + case "status": + try (Connection connection = dataSource.getConnection()) { + // Print basic Database info + printDBInfo(connection); + + // Get info table from Flyway + System.out.println("\n" + MigrationInfoDumper.dumpToAsciiTable(flyway.info().all())); + + // If Flyway is NOT yet initialized, also print the determined version information + // NOTE: search is case sensitive, as flyway table name is ALWAYS lowercase, + // See: http://flywaydb.org/documentation/faq.html#case-sensitive + if (!tableExists(connection, flyway.getConfiguration().getTable(), true)) { + System.out + .println("\nNOTE: This database is NOT yet initialized for auto-migrations " + + "(via Flyway)."); + // Determine which version of DSpace this looks like + String dbVersion = determineDBVersion(connection); + if (dbVersion != null) { + System.out + .println("\nYour database looks to be compatible with DSpace version " + dbVersion); + System.out.println( + "All upgrades *after* version " + dbVersion + " will be run during the next " + + "migration."); + System.out.println("\nIf you'd like to upgrade now, simply run 'dspace database " + + "migrate'."); + } + } + + // Print any database warnings/errors found (if any) + boolean issueFound = printDBIssues(connection); + + // If issues found, exit with an error status + if (issueFound) { + System.exit(1); + } else { + System.exit(0); + } + } catch (SQLException e) { + System.err.println("Info exception:"); + e.printStackTrace(System.err); + System.exit(1); + } + break; + // "migrate" = Run all pending database migrations + case "migrate": + try (Connection connection = 
dataSource.getConnection()) { + System.out.println("\nDatabase URL: " + connection.getMetaData().getURL()); + + // "migrate" allows for an OPTIONAL second argument (only one may be specified): + // - "ignored" = Also run any previously "ignored" migrations during the migration + // - "force" = Even if no pending migrations exist, still run migrate to trigger callbacks. + // - [version] = ONLY run migrations up to a specific DSpace version (ONLY FOR TESTING) + if (argv.length == 2) { + if (argv[1].equalsIgnoreCase("ignored")) { + System.out.println( + "Migrating database to latest version AND running previously \"Ignored\" " + + "migrations... (Check logs for details)"); + // Update the database to latest version, but set "outOfOrder=true" + // This will ensure any old migrations in the "ignored" state are now run + updateDatabase(dataSource, connection, null, true); + } else if (argv[1].equalsIgnoreCase("force")) { + updateDatabase(dataSource, connection, null, false, true); + } else { + // Otherwise, we assume "argv[1]" is a valid migration version number + // This is only for testing! Never specify for Production! + String migrationVersion = argv[1]; + BufferedReader input = new BufferedReader( + new InputStreamReader(System.in, StandardCharsets.UTF_8)); + + System.out.println( + "You've specified to migrate your database ONLY to version " + migrationVersion + + " ..."); + System.out.println( + "\nWARNING: In this mode, we DISABLE all callbacks, which means that you will " + + "need to manually update registries and manually run a reindex. This is " + + "because you are attempting to use an OLD version (" + migrationVersion + ") " + + "Database with a newer DSpace API. NEVER do this in a PRODUCTION scenario. " + + "The resulting database is only useful for migration testing.\n"); + + System.out.print( + "Are you SURE you only want to migrate your database to version " + + migrationVersion + "? 
[y/n]: "); + String choiceString = input.readLine(); + input.close(); + + if (choiceString.equalsIgnoreCase("y")) { + System.out.println( + "Migrating database ONLY to version " + migrationVersion + " ... " + + "(Check logs for details)"); + // Update the database, to the version specified. + updateDatabase(dataSource, connection, migrationVersion, false); + } else { + System.out.println("No action performed."); + } + } + } else { + System.out.println("Migrating database to latest version... " + + "(Check dspace logs for details)"); + updateDatabase(dataSource, connection); + } + System.out.println("Done."); + System.exit(0); + } catch (SQLException e) { + System.err.println("Migration exception:"); + e.printStackTrace(System.err); + System.exit(1); + } + break; + // "repair" = Run Flyway repair script + case "repair": + try (Connection connection = dataSource.getConnection();) { + System.out.println("\nDatabase URL: " + connection.getMetaData().getURL()); + System.out.println( + "Attempting to repair any previously failed migrations (or mismatched checksums) via " + + "FlywayDB... (Check dspace logs for details)"); + flyway.repair(); + System.out.println("Done."); + System.exit(0); + } catch (SQLException | FlywayException e) { + System.err.println("Repair exception:"); + e.printStackTrace(System.err); + System.exit(1); + } + break; + // "skip" = Skip a specific Flyway migration (by telling Flyway it succeeded) + case "skip": + try { + // "skip" requires a migration version to skip. Only that exact version will be skipped. 
+ if (argv.length == 2) { String migrationVersion = argv[1]; + BufferedReader input = new BufferedReader( - new InputStreamReader(System.in, StandardCharsets.UTF_8)); - + new InputStreamReader(System.in, StandardCharsets.UTF_8)); System.out.println( - "You've specified to migrate your database ONLY to version " + migrationVersion + " " + + "You've specified to SKIP the migration with version='" + migrationVersion + "' " + "..."); - System.out.println( - "\nWARNING: In this mode, we DISABLE all callbacks, which means that you will need " + - "to manually update registries and manually run a reindex. This is because you " + - "are attempting to use an OLD version (" + migrationVersion + ") Database with " + - "a newer DSpace API. NEVER do this in a PRODUCTION scenario. The resulting " + - "database is only useful for migration testing.\n"); - System.out.print( - "Are you SURE you only want to migrate your database to version " + migrationVersion - + "? [y/n]: "); + "\nWARNING: You should only skip migrations which are no longer required or have " + + "become obsolete. Skipping a REQUIRED migration may result in DSpace failing " + + "to startup or function properly. Are you sure you want to SKIP the " + + "migration with version '" + migrationVersion + "'? [y/n]: "); String choiceString = input.readLine(); input.close(); if (choiceString.equalsIgnoreCase("y")) { System.out.println( - "Migrating database ONLY to version " + migrationVersion + " ... (Check logs for " + - "details)"); - // Update the database, to the version specified. - updateDatabase(dataSource, connection, migrationVersion, false); - } else { - System.out.println("No action performed."); + "Attempting to skip migration with version " + migrationVersion + " " + + "... (Check logs for details)"); + skipMigration(dataSource, migrationVersion); + } + } else { + System.out.println("The 'skip' command REQUIRES a version to be specified. " + + "Only that single migration will be skipped. 
For the list " + + "of migration versions use the 'info' command."); + } + } catch (IOException e) { + System.err.println("Exception when attempting to skip migration:"); + e.printStackTrace(System.err); + System.exit(1); + } + break; + // "validate" = Run Flyway validation to check for database errors/issues + case "validate": + try (Connection connection = dataSource.getConnection();) { + System.out.println("\nDatabase URL: " + connection.getMetaData().getURL()); + System.out + .println("Attempting to validate database status (and migration checksums) via " + + "FlywayDB..."); + flyway.validate(); + System.out.println("No errors thrown. Validation succeeded. (Check dspace logs for more " + + "details)"); + System.exit(0); + } catch (SQLException | FlywayException e) { + System.err.println("Validation exception:"); + e.printStackTrace(System.err); + System.exit(1); + } + break; + // "clean" = Run Flyway clean script + case "clean": + // If clean is disabled, return immediately + if (flyway.getConfiguration().isCleanDisabled()) { + System.out.println( + "\nWARNING: 'clean' command is currently disabled, as it is dangerous to run in " + + "Production scenarios!"); + System.out.println( + "\nIn order to run a 'clean' you first must enable it in your DSpace config by " + + "specifying 'db.cleanDisabled=false'.\n"); + System.exit(1); + } + + try (Connection connection = dataSource.getConnection()) { + String dbType = getDbType(connection); + + // Not all Postgres user accounts will be able to run a 'clean', + // as only 'superuser' accounts can remove the 'pgcrypto' extension. 
+ if (dbType.equals(DBMS_POSTGRES)) { + // Check if database user has permissions suitable to run a clean + if (!PostgresUtils.checkCleanPermissions(connection)) { + String username = connection.getMetaData().getUserName(); + // Exit immediately, providing a descriptive error message + System.out.println( + "\nERROR: The database user '" + username + "' does not have sufficient " + + "privileges to run a 'database clean' (via Flyway)."); + System.out.println( + "\nIn order to run a 'clean', the database user MUST have 'superuser' privileges"); + System.out.println( + "OR the '" + PostgresUtils.PGCRYPTO + "' extension must be installed in a " + + "separate schema (see documentation)."); + System.out.println( + "\nOptionally, you could also manually remove the '" + PostgresUtils.PGCRYPTO + + "' extension first (DROP EXTENSION " + PostgresUtils.PGCRYPTO + + " CASCADE;), then rerun the 'clean'"); + System.exit(1); } } - } else { - System.out.println("Migrating database to latest version... (Check dspace logs for details)"); - updateDatabase(dataSource, connection); - } - System.out.println("Done."); - System.exit(0); - } catch (SQLException e) { - System.err.println("Migration exception:"); - e.printStackTrace(System.err); - System.exit(1); - } - } else if (argv[0].equalsIgnoreCase("repair")) { - // "repair" = Run Flyway repair script - try (Connection connection = dataSource.getConnection();) { - System.out.println("\nDatabase URL: " + connection.getMetaData().getURL()); - System.out.println( - "Attempting to repair any previously failed migrations (or mismatched checksums) via " + - "FlywayDB... 
(Check dspace logs for details)"); - flyway.repair(); - System.out.println("Done."); - System.exit(0); - } catch (SQLException | FlywayException e) { - System.err.println("Repair exception:"); - e.printStackTrace(System.err); - System.exit(1); - } - } else if (argv[0].equalsIgnoreCase("validate")) { - // "validate" = Run Flyway validation to check for database errors/issues + BufferedReader input = new BufferedReader(new InputStreamReader(System.in, + StandardCharsets.UTF_8)); - try (Connection connection = dataSource.getConnection();) { - System.out.println("\nDatabase URL: " + connection.getMetaData().getURL()); - System.out - .println("Attempting to validate database status (and migration checksums) via FlywayDB..."); - flyway.validate(); - System.out.println("No errors thrown. Validation succeeded. (Check dspace logs for more details)"); - System.exit(0); - } catch (SQLException | FlywayException e) { - System.err.println("Validation exception:"); - e.printStackTrace(System.err); - System.exit(1); - } - } else if (argv[0].equalsIgnoreCase("clean")) { - // "clean" = Run Flyway clean script - - // If clean is disabled, return immediately - if (flyway.getConfiguration().isCleanDisabled()) { - System.out.println( - "\nWARNING: 'clean' command is currently disabled, as it is dangerous to run in Production " + - "scenarios!"); - System.out.println( - "\nIn order to run a 'clean' you first must enable it in your DSpace config by specifying 'db" + - ".cleanDisabled=false'.\n"); - System.exit(1); - } - - try (Connection connection = dataSource.getConnection()) { - String dbType = getDbType(connection); - - // Not all Postgres user accounts will be able to run a 'clean', - // as only 'superuser' accounts can remove the 'pgcrypto' extension. 
- if (dbType.equals(DBMS_POSTGRES)) { - // Check if database user has permissions suitable to run a clean - if (!PostgresUtils.checkCleanPermissions(connection)) { - String username = connection.getMetaData().getUserName(); - // Exit immediately, providing a descriptive error message + System.out.println("\nDatabase URL: " + connection.getMetaData().getURL()); + System.out + .println("\nWARNING: ALL DATA AND TABLES IN YOUR DATABASE WILL BE PERMANENTLY DELETED.\n"); + System.out.println("There is NO turning back from this action. Backup your DB before " + + "continuing."); + if (dbType.equals(DBMS_ORACLE)) { + System.out.println("\nORACLE WARNING: your RECYCLEBIN will also be PURGED.\n"); + } else if (dbType.equals(DBMS_POSTGRES)) { System.out.println( - "\nERROR: The database user '" + username + "' does not have sufficient privileges to" + - " run a 'database clean' (via Flyway)."); - System.out.println( - "\nIn order to run a 'clean', the database user MUST have 'superuser' privileges"); - System.out.println( - "OR the '" + PostgresUtils.PGCRYPTO + "' extension must be installed in a separate " + - "schema (see documentation)."); - System.out.println( - "\nOptionally, you could also manually remove the '" + PostgresUtils.PGCRYPTO + "' " + - "extension first (DROP EXTENSION " + PostgresUtils.PGCRYPTO + " CASCADE;), then " + - "rerun the 'clean'"); - System.exit(1); + "\nPOSTGRES WARNING: the '" + PostgresUtils.PGCRYPTO + "' extension will be dropped " + + "if it is in the same schema as the DSpace database.\n"); } - } + System.out.print("Do you want to PERMANENTLY DELETE everything from your database? 
[y/n]: "); + String choiceString = input.readLine(); + input.close(); - BufferedReader input = new BufferedReader(new InputStreamReader(System.in, StandardCharsets.UTF_8)); - - System.out.println("\nDatabase URL: " + connection.getMetaData().getURL()); - System.out - .println("\nWARNING: ALL DATA AND TABLES IN YOUR DATABASE WILL BE PERMANENTLY DELETED.\n"); - System.out.println("There is NO turning back from this action. Backup your DB before continuing."); - if (dbType.equals(DBMS_ORACLE)) { - System.out.println("\nORACLE WARNING: your RECYCLEBIN will also be PURGED.\n"); - } else if (dbType.equals(DBMS_POSTGRES)) { - System.out.println( - "\nPOSTGRES WARNING: the '" + PostgresUtils.PGCRYPTO + "' extension will be dropped if it" + - " is in the same schema as the DSpace database.\n"); - } - System.out.print("Do you want to PERMANENTLY DELETE everything from your database? [y/n]: "); - String choiceString = input.readLine(); - input.close(); - - if (choiceString.equalsIgnoreCase("y")) { - System.out.println("Scrubbing database clean... 
(Check dspace logs for details)"); - cleanDatabase(flyway, dataSource); - System.out.println("Done."); - System.exit(0); - } else { - System.out.println("No action performed."); - } - } catch (SQLException e) { - System.err.println("Clean exception:"); - e.printStackTrace(System.err); - System.exit(1); - } - } else if (argv[0].equalsIgnoreCase("update-sequences")) { - try (Connection connection = dataSource.getConnection()) { - String dbType = getDbType(connection); - String sqlfile = "org/dspace/storage/rdbms/sqlmigration/" + dbType + - "/update-sequences.sql"; - InputStream sqlstream = DatabaseUtils.class.getClassLoader().getResourceAsStream(sqlfile); - if (sqlstream != null) { - String s = IOUtils.toString(sqlstream, "UTF-8"); - if (!s.isEmpty()) { - System.out.println("Running " + sqlfile); - connection.createStatement().execute(s); - System.out.println("update-sequences complete"); + if (choiceString.equalsIgnoreCase("y")) { + System.out.println("Scrubbing database clean... (Check dspace logs for details)"); + cleanDatabase(flyway, dataSource); + System.out.println("Done."); + System.exit(0); } else { - System.err.println(sqlfile + " contains no SQL to execute"); + System.out.println("No action performed."); } - } else { - System.err.println(sqlfile + " not found"); + } catch (SQLException e) { + System.err.println("Clean exception:"); + e.printStackTrace(System.err); + System.exit(1); } - } - } else { - System.out.println("\nUsage: database [action]"); - System.out.println("Valid actions: 'test', 'info', 'migrate', 'repair', " + - "'update-sequences' or 'clean'"); - System.out.println( - " - test = Performs a test connection to database to " + - "validate connection settings"); - System.out.println( - " - info / status = Describe basic info/status about database, including validating the " + - "compatibility of this database"); - System.out.println( - " - migrate = Migrate the database to the latest version"); - System.out.println( - " - repair = Attempt to 
repair any previously failed database " + - "migrations or checksum mismatches (via Flyway repair)"); - System.out.println( - " - validate = Validate current database's migration status (via Flyway validate), " + - "validating all migration checksums."); - System.out.println( - " - update-sequences = Update database sequences after running AIP ingest."); - System.out.println( - " - clean = DESTROY all data and tables in database " + - "(WARNING there is no going back!). " + - "Requires 'db.cleanDisabled=false' setting in config."); - System.out.println(""); - System.exit(0); + break; + // "update-sequences" = Run DSpace's "update-sequences.sql" script + case "update-sequences": + try (Connection connection = dataSource.getConnection()) { + String dbType = getDbType(connection); + String sqlfile = "org/dspace/storage/rdbms/sqlmigration/" + dbType + + "/update-sequences.sql"; + InputStream sqlstream = DatabaseUtils.class.getClassLoader().getResourceAsStream(sqlfile); + if (sqlstream != null) { + String s = IOUtils.toString(sqlstream, StandardCharsets.UTF_8); + if (!s.isEmpty()) { + System.out.println("Running " + sqlfile); + connection.createStatement().execute(s); + System.out.println("update-sequences complete"); + } else { + System.err.println(sqlfile + " contains no SQL to execute"); + } + } else { + System.err.println(sqlfile + " not found"); + } + } + break; + // default = show help information + default: + System.out.println("\nUsage: database [action]"); + System.out.println("Valid actions: 'test', 'info', 'migrate', 'repair', 'skip', " + + "'validate', 'update-sequences' or 'clean'"); + System.out.println( + " - test = Performs a test connection to database to " + + "validate connection settings"); + System.out.println( + " - info / status = Describe basic info/status about database, including validating the " + + "compatibility of this database"); + System.out.println( + " - migrate = Migrate the database to the latest version"); + System.out.println( + " - 
repair = Attempt to repair any previously failed database " + + "migrations or checksum mismatches (via Flyway repair)"); + System.out.println( + " - skip [version] = Skip a single, pending or ignored migration, " + + "ensuring it never runs."); + System.out.println( + " - validate = Validate current database's migration status (via Flyway validate), " + + "validating all migration checksums."); + System.out.println( + " - update-sequences = Update database sequences after running AIP ingest."); + System.out.println( + " - clean = DESTROY all data and tables in database " + + "(WARNING there is no going back!). " + + "Requires 'db.cleanDisabled=false' setting in config."); + System.out.println(""); + System.exit(0); + break; } } catch (Exception e) { @@ -406,6 +467,12 @@ public class DatabaseUtils { DatabaseMetaData meta = connection.getMetaData(); String dbType = getDbType(connection); System.out.println("\nDatabase Type: " + dbType); + if (dbType.equals(DBMS_ORACLE)) { + System.out.println("===================================="); + System.out.println("WARNING: Oracle support is deprecated!"); + System.out.println("See https://github.com/DSpace/DSpace/issues/8214"); + System.out.println("====================================="); + } System.out.println("Database URL: " + meta.getURL()); System.out.println("Database Schema: " + getSchemaName(connection)); System.out.println("Database Username: " + meta.getUserName()); @@ -539,6 +606,10 @@ public class DatabaseUtils { String dbType = getDbType(connection); connection.close(); + if (dbType.equals(DBMS_ORACLE)) { + log.warn("ORACLE SUPPORT IS DEPRECATED! See https://github.com/DSpace/DSpace/issues/8214"); + } + // Determine location(s) where Flyway will load all DB migrations ArrayList scriptLocations = new ArrayList<>(); @@ -776,6 +847,89 @@ public class DatabaseUtils { } } + /** + * Skips the given migration by marking it as "successful" in the Flyway table. 
This ensures + * the given migration will never be run again. + *

    + * WARNING: Skipping a required migration can result in unexpected errors. Make sure the migration is + * not required (or obsolete) before skipping it. + * @param dataSource current DataSource + * @param skipVersion version of migration to skip + * @throws SQLException if error occurs + */ + private static synchronized void skipMigration(DataSource dataSource, + String skipVersion) throws SQLException { + if (null == dataSource) { + throw new SQLException("The datasource is a null reference -- cannot continue."); + } + + try (Connection connection = dataSource.getConnection()) { + // Setup Flyway API against our database + FluentConfiguration flywayConfiguration = setupFlyway(dataSource); + + // In order to allow for skipping "Ignored" migrations, we MUST set "outOfOrder=true". + // (Otherwise Ignored migrations never appear in the pending list) + flywayConfiguration.outOfOrder(true); + + // Initialized Flyway object based on this configuration + Flyway flyway = flywayConfiguration.load(); + + // Find the migration we are skipping in the list of pending migrations + boolean foundMigration = false; + for (MigrationInfo migration : flyway.info().pending()) { + // If this migration matches our "skipVersion" + if (migration.getVersion().equals(MigrationVersion.fromVersion(skipVersion))) { + foundMigration = true; + System.out.println("Found migration matching version='" + skipVersion + "'. " + + "Changing state to 'Success' in order to skip it."); + + PreparedStatement statement = null; + try { + // Create SQL Insert which will log this migration as having already been run. 
+ String INSERT_SQL = "INSERT INTO " + FLYWAY_TABLE + " " + + "(" + + "installed_rank, version, description, type, script, " + + "checksum, installed_by, execution_time, success" + + ") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"; + statement = connection.prepareStatement(INSERT_SQL); + // installed_rank + statement.setInt(1, getNextFlywayInstalledRank(flyway)); + // version + statement.setString(2, migration.getVersion().getVersion()); + // description + statement.setString(3, migration.getDescription()); + // type + statement.setString(4, migration.getType().toString()); + // script + statement.setString(5, migration.getScript()); + // checksum + statement.setInt(6, migration.getChecksum()); + // installed_by + statement.setString(7, getDBUserName(connection)); + // execution_time is set to zero as we didn't really execute it + statement.setInt(8, 0); + // success=true tells Flyway this migration no longer needs to be run. + statement.setBoolean(9, true); + + // Run the INSERT + statement.executeUpdate(); + } finally { + if (statement != null && !statement.isClosed()) { + statement.close(); + } + } + } + } + if (!foundMigration) { + System.err.println("Could not find migration to skip! " + + "No 'Pending' or 'Ignored' migrations match version='" + skipVersion + "'"); + } + } catch (FlywayException fe) { + // If any FlywayException (Runtime) is thrown, change it to a SQLException + throw new SQLException("Flyway error occurred", fe); + } + } + /** * Clean the existing database, permanently removing all data and tables *

    @@ -1182,6 +1336,34 @@ public class DatabaseUtils { return schema; } + /** + * Get the Database User Name in use by this Connection. + * + * @param connection Current Database Connection + * @return User name as a string, or "null" if cannot be determined or unspecified + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + public static String getDBUserName(Connection connection) + throws SQLException { + String username = null; + + // Try to get the schema from the DB connection itself. + // As long as the Database driver supports JDBC4.1, there should be a getSchema() method + // If this method is unimplemented or doesn't exist, it will throw an exception (likely an AbstractMethodError) + try { + username = connection.getMetaData().getUserName(); + } catch (Exception | AbstractMethodError e) { + // ignore + } + + // If we don't know our schema, let's try the schema in the DSpace configuration + if (StringUtils.isBlank(username)) { + username = canonicalize(connection, DSpaceServicesFactory.getInstance().getConfigurationService() + .getProperty("db.username")); + } + return username; + } + /** * Return the canonical name for a database identifier based on whether this * database defaults to storing identifiers in uppercase or lowercase. @@ -1433,4 +1615,22 @@ public class DatabaseUtils { } return null; } + + /** + * Determine next valid "installed_rank" value from Flyway, based on the "installed_rank" of the + * last applied migration. + * @param flyway currently loaded Flyway + * @return next installed rank value + */ + private static int getNextFlywayInstalledRank(Flyway flyway) throws FlywayException { + // Load all applied migrations + MigrationInfo[] appliedMigrations = flyway.info().applied(); + // If no applied migrations, throw an error. 
+ // This should never happen, but this would mean Flyway is not installed or initialized + if (ArrayUtils.isEmpty(appliedMigrations)) { + throw new FlywayException("Cannot determine next 'installed_rank' as no applied migrations exist"); + } + // Find the last migration in the list, and increment its "installed_rank" by one. + return appliedMigrations[appliedMigrations.length - 1].getInstalledRank() + 1; + } } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/EntityTypeServiceInitializer.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/EntityTypeServiceInitializer.java index ebf790900b..e0e41516d0 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/EntityTypeServiceInitializer.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/EntityTypeServiceInitializer.java @@ -49,6 +49,16 @@ public class EntityTypeServiceInitializer implements Callback { } } + /** + * The callback name, Flyway will use this to sort the callbacks alphabetically before executing them + * @return The callback name + */ + @Override + public String getCallbackName() { + // Return class name only (not prepended by package) + return EntityTypeServiceInitializer.class.getSimpleName(); + } + @Override public boolean supports(Event event, org.flywaydb.core.api.callback.Context context) { // Must run AFTER all migrations complete, since it is dependent on Hibernate diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/GroupServiceInitializer.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/GroupServiceInitializer.java index 7338dd75bc..54498a1c64 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/GroupServiceInitializer.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/GroupServiceInitializer.java @@ -51,6 +51,16 @@ public class GroupServiceInitializer implements Callback { } + /** + * The callback name, Flyway will use this to sort the callbacks alphabetically before executing them + * @return The callback name + */ + 
@Override + public String getCallbackName() { + // Return class name only (not prepended by package) + return GroupServiceInitializer.class.getSimpleName(); + } + /** * Events supported by this callback. * @param event Flyway event diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/PostgreSQLCryptoChecker.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/PostgreSQLCryptoChecker.java index 5798f4254c..5459cc3cc3 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/PostgreSQLCryptoChecker.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/PostgreSQLCryptoChecker.java @@ -97,6 +97,16 @@ public class PostgreSQLCryptoChecker implements Callback { } } + /** + * The callback name, Flyway will use this to sort the callbacks alphabetically before executing them + * @return The callback name + */ + @Override + public String getCallbackName() { + // Return class name only (not prepended by package) + return PostgreSQLCryptoChecker.class.getSimpleName(); + } + /** * Events supported by this callback. 
* @param event Flyway event diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java index ae8be0988a..7debf3ba44 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java @@ -12,6 +12,7 @@ import java.io.IOException; import java.sql.SQLException; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; +import javax.xml.xpath.XPathExpressionException; import org.dspace.administer.MetadataImporter; import org.dspace.administer.RegistryImportException; @@ -89,7 +90,7 @@ public class RegistryUpdater implements Callback { } catch (IOException | SQLException | ParserConfigurationException | TransformerException | RegistryImportException | AuthorizeException | NonUniqueMetadataException - | SAXException e) { + | SAXException | XPathExpressionException e) { log.error("Error attempting to update Bitstream Format and/or Metadata Registries", e); throw new RuntimeException("Error attempting to update Bitstream Format and/or Metadata Registries", e); } finally { @@ -101,6 +102,16 @@ public class RegistryUpdater implements Callback { } + /** + * The callback name, Flyway will use this to sort the callbacks alphabetically before executing them + * @return The callback name + */ + @Override + public String getCallbackName() { + // Return class name only (not prepended by package) + return RegistryUpdater.class.getSimpleName(); + } + /** * Events supported by this callback. 
* @param event Flyway event diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/SiteServiceInitializer.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/SiteServiceInitializer.java index 26e76804e1..872a633146 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/SiteServiceInitializer.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/SiteServiceInitializer.java @@ -73,6 +73,16 @@ public class SiteServiceInitializer implements Callback { } + /** + * The callback name, Flyway will use this to sort the callbacks alphabetically before executing them + * @return The callback name + */ + @Override + public String getCallbackName() { + // Return class name only (not prepended by package) + return SiteServiceInitializer.class.getSimpleName(); + } + /** * Events supported by this callback. * @param event Flyway event diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/hibernate/DatabaseAwareLobType.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/hibernate/DatabaseAwareLobType.java new file mode 100644 index 0000000000..95939f9902 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/hibernate/DatabaseAwareLobType.java @@ -0,0 +1,57 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.storage.rdbms.hibernate; + +import org.apache.commons.lang.StringUtils; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.hibernate.type.AbstractSingleColumnStandardBasicType; +import org.hibernate.type.descriptor.java.StringTypeDescriptor; +import org.hibernate.type.descriptor.sql.ClobTypeDescriptor; +import org.hibernate.type.descriptor.sql.LongVarcharTypeDescriptor; +import org.hibernate.type.descriptor.sql.SqlTypeDescriptor; + +/** + * A Hibernate @Type 
used to properly support the CLOB in both Postgres and Oracle. + * PostgreSQL doesn't have a CLOB type, instead it's a TEXT field. + * Normally, you'd use org.hibernate.type.TextType to support TEXT, but that won't work for Oracle. + * https://github.com/hibernate/hibernate-orm/blob/5.6/hibernate-core/src/main/java/org/hibernate/type/TextType.java + * + * This Type checks if we are using PostgreSQL. + * If so, it configures Hibernate to map CLOB to LongVarChar (same as org.hibernate.type.TextType) + * If not, it uses default CLOB (which works for other databases). + */ +public class DatabaseAwareLobType extends AbstractSingleColumnStandardBasicType { + + public static final DatabaseAwareLobType INSTANCE = new DatabaseAwareLobType(); + + public DatabaseAwareLobType() { + super( getDbDescriptor(), StringTypeDescriptor.INSTANCE ); + } + + public static SqlTypeDescriptor getDbDescriptor() { + if ( isPostgres() ) { + return LongVarcharTypeDescriptor.INSTANCE; + } else { + return ClobTypeDescriptor.DEFAULT; + } + } + + private static boolean isPostgres() { + ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + String dbDialect = configurationService.getProperty("db.dialect"); + + return StringUtils.containsIgnoreCase(dbDialect, "PostgreSQL"); + } + + @Override + public String getName() { + return "database_aware_lob"; + } +} + diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/hibernate/postgres/DSpacePostgreSQL82Dialect.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/hibernate/postgres/DSpacePostgreSQL82Dialect.java deleted file mode 100644 index 2701c22fd2..0000000000 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/hibernate/postgres/DSpacePostgreSQL82Dialect.java +++ /dev/null @@ -1,67 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * 
http://www.dspace.org/license/ - */ -package org.dspace.storage.rdbms.hibernate.postgres; - -import java.sql.Types; - -import org.hibernate.dialect.PostgreSQL82Dialect; -import org.hibernate.service.ServiceRegistry; -import org.hibernate.type.PostgresUUIDType; -import org.hibernate.type.descriptor.sql.LongVarcharTypeDescriptor; -import org.hibernate.type.descriptor.sql.SqlTypeDescriptor; - -/** - * UUID's are not supported by default in hibernate due to differences in the database in order to fix this a custom - * sql dialect is needed. - * Source: https://forum.hibernate.org/viewtopic.php?f=1&t=1014157 - * - * @author kevinvandevelde at atmire.com - */ -public class DSpacePostgreSQL82Dialect extends PostgreSQL82Dialect { - @Override - public void contributeTypes(final org.hibernate.boot.model.TypeContributions typeContributions, - final ServiceRegistry serviceRegistry) { - super.contributeTypes(typeContributions, serviceRegistry); - typeContributions.contributeType(new InternalPostgresUUIDType()); - } - - @Override - protected void registerHibernateType(int code, String name) { - super.registerHibernateType(code, name); - } - - protected static class InternalPostgresUUIDType extends PostgresUUIDType { - - @Override - protected boolean registerUnderJavaType() { - return true; - } - } - - /** - * Override is needed to properly support the CLOB on metadatavalue in Postgres and Oracle. - * - * @param sqlCode {@linkplain java.sql.Types JDBC type-code} for the column mapped by this type. - * @return Descriptor for the SQL/JDBC side of a value mapping. 
- */ - @Override - public SqlTypeDescriptor getSqlTypeDescriptorOverride(int sqlCode) { - SqlTypeDescriptor descriptor; - switch (sqlCode) { - case Types.CLOB: { - descriptor = LongVarcharTypeDescriptor.INSTANCE; - break; - } - default: { - descriptor = super.getSqlTypeDescriptorOverride(sqlCode); - break; - } - } - return descriptor; - } -} diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java index 624d0cb55a..842fc15e16 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java @@ -86,10 +86,11 @@ public class MigrationUtils { cascade = true; break; case "h2": - // In H2, constraints are listed in the "information_schema.constraints" table + // In H2, column constraints are listed in the "INFORMATION_SCHEMA.KEY_COLUMN_USAGE" table constraintNameSQL = "SELECT DISTINCT CONSTRAINT_NAME " + - "FROM information_schema.constraints " + - "WHERE table_name = ? AND column_list = ?"; + "FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE " + + "WHERE TABLE_NAME = ? AND COLUMN_NAME = ?"; + cascade = true; break; default: throw new SQLException("DBMS " + dbtype + " is unsupported in this migration."); diff --git a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigration.java b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigration.java index 362f2720bb..db1fdcdd19 100644 --- a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigration.java +++ b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigration.java @@ -64,12 +64,6 @@ public class SubmissionFormsMigration extends DSpaceRunnable"; private List tempFiles = new ArrayList<>(); - /** - * We need to force this, because some dependency elsewhere interferes. 
- */ - private static final String TRANSFORMER_FACTORY_CLASS - = "org.apache.xalan.processor.TransformerFactoryImpl"; - @Override public void internalRun() throws TransformerException { if (help) { @@ -101,8 +95,7 @@ public class SubmissionFormsMigration extends DSpaceRunnable options; @@ -24,22 +26,52 @@ public class UploadConfiguration { private Boolean required; private String name; + /** + * Construct a bitstream uploading configuration. + * @param configurationService DSpace configuration provided by the DI container. + */ + @Inject + public UploadConfiguration(ConfigurationService configurationService) { + this.configurationService = configurationService; + } + + /** + * The list of access restriction types from which a submitter may choose. + * @return choices for restricting access to Bitstreams. + */ public List getOptions() { return options; } + /** + * Set the list of access restriction types from which to choose. + * Required. May be empty. + * @param options choices for restricting access to Bitstreams. + */ public void setOptions(List options) { this.options = options; } + /** + * Name of the submission form to which these conditions are attached. + * @return the form's name. + */ public String getMetadata() { return metadataDefinition; } + /** + * Name the submission form to which these conditions are attached. + * @param metadata the form's name. + */ public void setMetadata(String metadata) { this.metadataDefinition = metadata; } + /** + * Limit on the maximum size of an uploaded Bitstream. + * @return maximum upload size in bytes. + */ public Long getMaxSize() { if (maxSize == null) { maxSize = configurationService.getLongProperty("upload.max"); @@ -47,10 +79,18 @@ public class UploadConfiguration { return maxSize; } + /** + * Limit the maximum size of an uploaded Bitstream. + * @param maxSize maximum upload size in bytes. 
+ */ public void setMaxSize(Long maxSize) { this.maxSize = maxSize; } + /** + * Is at least one Bitstream required when submitting a new Item? + * @return true if a Bitstream is required. + */ public Boolean isRequired() { if (required == null) { //defaults to true @@ -60,25 +100,27 @@ public class UploadConfiguration { return required; } + /** + * Is at least one Bitstream required when submitting a new Item? + * @param required true if a Bitstream is required. + */ public void setRequired(Boolean required) { this.required = required; } - public ConfigurationService getConfigurationService() { - return configurationService; - } - - public void setConfigurationService(ConfigurationService configurationService) { - this.configurationService = configurationService; - } - + /** + * The unique name of this configuration. + * @return configuration's name. + */ public String getName() { return name; } + /** + * Give this configuration a unique name. Required. + * @param name configuration's name. 
+ */ public void setName(String name) { this.name = name; } - - } diff --git a/dspace-api/src/main/java/org/dspace/testing/PubMedToImport.java b/dspace-api/src/main/java/org/dspace/testing/PubMedToImport.java index b7ded5ecbf..ec51528429 100644 --- a/dspace-api/src/main/java/org/dspace/testing/PubMedToImport.java +++ b/dspace-api/src/main/java/org/dspace/testing/PubMedToImport.java @@ -24,10 +24,10 @@ import org.apache.commons.cli.Options; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.output.Format; -import org.jdom.output.XMLOutputter; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.output.Format; +import org.jdom2.output.XMLOutputter; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import org.xml.sax.helpers.DefaultHandler; diff --git a/dspace-api/src/main/java/org/dspace/util/FunctionalUtils.java b/dspace-api/src/main/java/org/dspace/util/FunctionalUtils.java new file mode 100644 index 0000000000..422c2405a8 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/FunctionalUtils.java @@ -0,0 +1,61 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import java.util.Objects; +import java.util.function.Predicate; +import java.util.function.Supplier; + +/** + * + * These methods are linked to the functional paradigm and use {@code Functional} interfaces of Java 8+; all the main + * interfaces are in the package {@link java.util.function}. + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + * + */ +public class FunctionalUtils { + + /** + * Private constructor; this is a utility class with only static methods / functions. 
+ */ + private FunctionalUtils() { + } + + /** + * + * Tests that {@code defaultValue} isn't null. If this test is positive, then + * returns the {@code defaultValue}; Otherwise builds a new instance using the + * {@code builder} + * + * @param defaultValue default instance value + * @param builder instance generator + * @return corresponding non-null instance + */ + public static T getDefaultOrBuild(T defaultValue, Supplier builder) { + return getCheckDefaultOrBuild(Objects::nonNull, defaultValue, builder); + } + + /** + * Tests the {@code defaultValue} using the {@code defaultValueChecker}. If its + * test is positive, then returns the {@code defaultValue}; Otherwise builds a + * new instance using the {@code builder} + * + * @param defaultValueChecker checker that tests the defaultValue + * @param defaultValue default instance value + * @param builder supplier that generates a typed instance + * @return corresponding instance after check + */ + public static T getCheckDefaultOrBuild(Predicate defaultValueChecker, T defaultValue, Supplier builder) { + if (defaultValueChecker.test(defaultValue)) { + return defaultValue; + } + return builder.get(); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/util/SimpleMapConverter.java b/dspace-api/src/main/java/org/dspace/util/SimpleMapConverter.java new file mode 100644 index 0000000000..2b0d8d96dd --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/SimpleMapConverter.java @@ -0,0 +1,107 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import java.io.File; +import java.io.FileInputStream; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.services.ConfigurationService; +import org.springframework.util.Assert; 
+ +/** + * Class that parses a properties file present in the crosswalks directory and + * allows getting its values given a key. + * + * @author Andrea Bollini + * @author Kostas Stamatis + * @author Luigi Andrea Pascarelli + * @author Panagiotis Koutsourakis + * @author Luca Giamminonni + */ +public class SimpleMapConverter { + + private String converterNameFile; // The properties filename + + private ConfigurationService configurationService; + + private Map mapping; + + private String defaultValue = ""; + + /** + * Parse the configured property file. + */ + public void init() { + + Assert.notNull(converterNameFile, "No properties file name provided"); + Assert.notNull(configurationService, "No configuration service provided"); + + String mappingFile = configurationService.getProperty( + "dspace.dir") + File.separator + "config" + File.separator + "crosswalks" + File.separator + + converterNameFile; + + try (FileInputStream fis = new FileInputStream(new File(mappingFile))) { + + Properties mapConfig = new Properties(); + mapConfig.load(fis); + + this.mapping = parseProperties(mapConfig); + + } catch (Exception e) { + throw new IllegalArgumentException("An error occurs parsing " + mappingFile, e); + } + + } + + /** + * Returns the value related to the given key. If the given key is not found, the + * configured default value is returned, or the key itself if that default is blank. 
+ * + * @param key the key to search for a value + * @return the value + */ + public String getValue(String key) { + + String value = mapping.getOrDefault(key, defaultValue); + + if (StringUtils.isBlank(value)) { + return key; + } + + return value; + } + + private Map parseProperties(Properties properties) { + + Map mapping = new HashMap(); + + for (Object key : properties.keySet()) { + String keyString = (String) key; + mapping.put(keyString, properties.getProperty(keyString, "")); + } + + return mapping; + + } + + public void setDefaultValue(String defaultValue) { + this.defaultValue = defaultValue; + } + + public void setConverterNameFile(String converterNameFile) { + this.converterNameFile = converterNameFile; + } + + public void setConfigurationService(ConfigurationService configurationService) { + this.configurationService = configurationService; + } +} diff --git a/dspace-api/src/main/java/org/dspace/util/SolrUpgradePre6xStatistics.java b/dspace-api/src/main/java/org/dspace/util/SolrUpgradePre6xStatistics.java index 12a9970539..7dcebcc09f 100644 --- a/dspace-api/src/main/java/org/dspace/util/SolrUpgradePre6xStatistics.java +++ b/dspace-api/src/main/java/org/dspace/util/SolrUpgradePre6xStatistics.java @@ -240,8 +240,8 @@ public class SolrUpgradePre6xStatistics { /** * Print a status message appended with the processing time for the operation * - * @param header - * Message to display + * @param numProcessed + * count of records processed so far. 
* @param fromStart * if true, report on processing time since the start of the program */ diff --git a/dspace-api/src/main/java/org/dspace/versioning/AbstractVersionProvider.java b/dspace-api/src/main/java/org/dspace/versioning/AbstractVersionProvider.java index 8b0ca9aeb8..329332d315 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/AbstractVersionProvider.java +++ b/dspace-api/src/main/java/org/dspace/versioning/AbstractVersionProvider.java @@ -21,6 +21,7 @@ import org.dspace.content.Item; import org.dspace.content.MetadataField; import org.dspace.content.MetadataSchema; import org.dspace.content.MetadataValue; +import org.dspace.content.RelationshipMetadataValue; import org.dspace.content.service.BitstreamService; import org.dspace.content.service.BundleService; import org.dspace.content.service.ItemService; @@ -55,14 +56,24 @@ public abstract class AbstractVersionProvider { MetadataSchema metadataSchema = metadataField.getMetadataSchema(); String unqualifiedMetadataField = metadataSchema.getName() + "." + metadataField.getElement(); if (getIgnoredMetadataFields().contains(metadataField.toString('.')) || - getIgnoredMetadataFields().contains(unqualifiedMetadataField + "." + Item.ANY)) { - //Skip this metadata field + getIgnoredMetadataFields().contains(unqualifiedMetadataField + "." 
+ Item.ANY) || + aMd instanceof RelationshipMetadataValue) { + //Skip this metadata field (ignored and/or virtual) continue; } - itemService - .addMetadata(context, itemNew, metadataField, aMd.getLanguage(), aMd.getValue(), aMd.getAuthority(), - aMd.getConfidence()); + itemService.addMetadata( + context, + itemNew, + metadataField.getMetadataSchema().getName(), + metadataField.getElement(), + metadataField.getQualifier(), + aMd.getLanguage(), + aMd.getValue(), + aMd.getAuthority(), + aMd.getConfidence(), + aMd.getPlace() + ); } } diff --git a/dspace-api/src/main/java/org/dspace/versioning/DefaultItemVersionProvider.java b/dspace-api/src/main/java/org/dspace/versioning/DefaultItemVersionProvider.java index 7903a49c31..d4590ae24e 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/DefaultItemVersionProvider.java +++ b/dspace-api/src/main/java/org/dspace/versioning/DefaultItemVersionProvider.java @@ -15,7 +15,9 @@ import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; import org.dspace.content.Item; +import org.dspace.content.Relationship; import org.dspace.content.WorkspaceItem; +import org.dspace.content.service.RelationshipService; import org.dspace.content.service.WorkspaceItemService; import org.dspace.core.Context; import org.dspace.identifier.IdentifierException; @@ -44,6 +46,8 @@ public class DefaultItemVersionProvider extends AbstractVersionProvider implemen protected VersioningService versioningService; @Autowired(required = true) protected IdentifierService identifierService; + @Autowired(required = true) + protected RelationshipService relationshipService; @Override public Item createNewItemAndAddItInWorkspace(Context context, Item nativeItem) { @@ -89,10 +93,18 @@ public class DefaultItemVersionProvider extends AbstractVersionProvider implemen } } + /** + * Copy all data (minus a few exceptions) from the old item to the new item. + * @param c the DSpace context. 
+ * @param itemNew the new version of the item. + * @param previousItem the old version of the item. + * @return the new version of the item, with data from the old item. + */ @Override public Item updateItemState(Context c, Item itemNew, Item previousItem) { try { copyMetadata(c, itemNew, previousItem); + copyRelationships(c, itemNew, previousItem); createBundlesAndAddBitstreams(c, itemNew, previousItem); try { identifierService.reserve(c, itemNew); @@ -114,4 +126,49 @@ public class DefaultItemVersionProvider extends AbstractVersionProvider implemen throw new RuntimeException(e.getMessage(), e); } } + + /** + * Copy all relationships of the old item to the new item. + * At this point in the lifecycle of the item-version (before archival), only the opposite item receives + * "latest" status. On item archival of the item-version, the "latest" status of the relevant relationships + * will be updated. + * @param context the DSpace context. + * @param newItem the new version of the item. + * @param oldItem the old version of the item. 
+ */ + protected void copyRelationships( + Context context, Item newItem, Item oldItem + ) throws SQLException, AuthorizeException { + List oldRelationships = relationshipService.findByItem(context, oldItem, -1, -1, false, true); + for (Relationship oldRelationship : oldRelationships) { + if (oldRelationship.getLeftItem().equals(oldItem)) { + // current item is on left side of this relationship + relationshipService.create( + context, + newItem, // new item + oldRelationship.getRightItem(), + oldRelationship.getRelationshipType(), + oldRelationship.getLeftPlace(), + oldRelationship.getRightPlace(), + oldRelationship.getLeftwardValue(), + oldRelationship.getRightwardValue(), + Relationship.LatestVersionStatus.RIGHT_ONLY // only mark the opposite side as "latest" for now + ); + } else if (oldRelationship.getRightItem().equals(oldItem)) { + // current item is on right side of this relationship + relationshipService.create( + context, + oldRelationship.getLeftItem(), + newItem, // new item + oldRelationship.getRelationshipType(), + oldRelationship.getLeftPlace(), + oldRelationship.getRightPlace(), + oldRelationship.getLeftwardValue(), + oldRelationship.getRightwardValue(), + Relationship.LatestVersionStatus.LEFT_ONLY // only mark the opposite side as "latest" for now + ); + } + } + } + } diff --git a/dspace-api/src/main/java/org/dspace/versioning/ItemVersionProvider.java b/dspace-api/src/main/java/org/dspace/versioning/ItemVersionProvider.java index 83369e0465..74014b6262 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/ItemVersionProvider.java +++ b/dspace-api/src/main/java/org/dspace/versioning/ItemVersionProvider.java @@ -22,5 +22,12 @@ public interface ItemVersionProvider { public void deleteVersionedItem(Context c, Version versionToDelete, VersionHistory history) throws SQLException; + /** + * Copy all data (minus a few exceptions) from the old item to the new item. + * @param c the DSpace context. + * @param itemNew the new version of the item. 
+ * @param previousItem the old version of the item. + * @return the new version of the item, with data from the old item. + */ public Item updateItemState(Context c, Item itemNew, Item previousItem); } diff --git a/dspace-api/src/main/java/org/dspace/versioning/VersioningConsumer.java b/dspace-api/src/main/java/org/dspace/versioning/VersioningConsumer.java index 6683419844..63b5391d0a 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/VersioningConsumer.java +++ b/dspace-api/src/main/java/org/dspace/versioning/VersioningConsumer.java @@ -7,39 +7,66 @@ */ package org.dspace.versioning; -import java.util.HashSet; -import java.util.Set; +import static org.dspace.versioning.utils.RelationshipVersioningUtils.LatestVersionStatusChangelog.NO_CHANGES; +import java.sql.SQLException; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.EntityType; import org.dspace.content.Item; +import org.dspace.content.Relationship; +import org.dspace.content.RelationshipType; import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.EntityTypeService; import org.dspace.content.service.ItemService; +import org.dspace.content.service.RelationshipService; +import org.dspace.content.service.RelationshipTypeService; import org.dspace.core.Constants; import org.dspace.core.Context; +import org.dspace.discovery.IndexEventConsumer; import org.dspace.event.Consumer; import org.dspace.event.Event; import org.dspace.versioning.factory.VersionServiceFactory; import org.dspace.versioning.service.VersionHistoryService; -import org.dspace.versioning.service.VersioningService; +import org.dspace.versioning.utils.RelationshipVersioningUtils; +import 
org.dspace.versioning.utils.RelationshipVersioningUtils.LatestVersionStatusChangelog; /** + * When a new version of an item is published, unarchive the previous version and + * update {@link Relationship#latestVersionStatus} of the relevant relationships. + * * @author Fabio Bolognesi (fabio at atmire dot com) * @author Mark Diggory (markd at atmire dot com) * @author Ben Bosman (ben at atmire dot com) */ public class VersioningConsumer implements Consumer { - private static Set itemsToProcess; + private static final Logger log = LogManager.getLogger(VersioningConsumer.class); + + private Set itemsToProcess; private VersionHistoryService versionHistoryService; - private VersioningService versioningService; private ItemService itemService; - + private EntityTypeService entityTypeService; + private RelationshipTypeService relationshipTypeService; + private RelationshipService relationshipService; + private RelationshipVersioningUtils relationshipVersioningUtils; @Override public void initialize() throws Exception { versionHistoryService = VersionServiceFactory.getInstance().getVersionHistoryService(); - versioningService = VersionServiceFactory.getInstance().getVersionService(); itemService = ContentServiceFactory.getInstance().getItemService(); + entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService(); + relationshipTypeService = ContentServiceFactory.getInstance().getRelationshipTypeService(); + relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); + relationshipVersioningUtils = VersionServiceFactory.getInstance().getRelationshipVersioningUtils(); } @Override @@ -49,37 +76,399 @@ public class VersioningConsumer implements Consumer { @Override public void consume(Context ctx, Event event) throws Exception { if (itemsToProcess == null) { - itemsToProcess = new HashSet(); + itemsToProcess = new HashSet<>(); } - int st = event.getSubjectType(); - int et = event.getEventType(); + // only items + if 
(event.getSubjectType() != Constants.ITEM) { + return; + } - if (st == Constants.ITEM && et == Event.INSTALL) { - Item item = (Item) event.getSubject(ctx); - if (item != null && item.isArchived()) { - VersionHistory history = versionHistoryService.findByItem(ctx, item); - if (history != null) { - Version latest = versionHistoryService.getLatestVersion(ctx, history); - Version previous = versionHistoryService.getPrevious(ctx, history, latest); - if (previous != null) { - Item previousItem = previous.getItem(); - if (previousItem != null) { - previousItem.setArchived(false); - itemsToProcess.add(previousItem); - //Fire a new modify event for our previous item - //Due to the need to reindex the item in the search - //and browse index we need to fire a new event - ctx.addEvent(new Event(Event.MODIFY, - previousItem.getType(), previousItem.getID(), - null, itemService.getIdentifiers(ctx, previousItem))); - } - } + // only install events + if (event.getEventType() != Event.INSTALL) { + return; + } + + // get the item (should be archived) + Item item = (Item) event.getSubject(ctx); + if (item == null || !item.isArchived()) { + return; + } + + // get version history + VersionHistory history = versionHistoryService.findByItem(ctx, item); + if (history == null) { + return; + } + + // get latest version + Version latestVersion = versionHistoryService.getLatestVersion(ctx, history); + if (latestVersion == null) { + return; + } + + // get previous version + Version previousVersion = versionHistoryService.getPrevious(ctx, history, latestVersion); + if (previousVersion == null) { + return; + } + + // get latest item + Item latestItem = latestVersion.getItem(); + if (latestItem == null) { + String msg = String.format( + "Illegal state: Obtained version history of item with uuid %s, handle %s, but the latest item is null", + item.getID(), item.getHandle() + ); + log.error(msg); + throw new IllegalStateException(msg); + } + + // get previous item + Item previousItem = 
previousVersion.getItem(); + if (previousItem == null) { + return; + } + + // unarchive previous item + unarchiveItem(ctx, previousItem); + + // update relationships + updateRelationships(ctx, latestItem, previousItem); + } + + protected void unarchiveItem(Context ctx, Item item) { + item.setArchived(false); + itemsToProcess.add(item); + //Fire a new modify event for our previous item + //Due to the need to reindex the item in the search + //and browse index we need to fire a new event + ctx.addEvent(new Event( + Event.MODIFY, item.getType(), item.getID(), null, itemService.getIdentifiers(ctx, item) + )); + } + + /** + * Update {@link Relationship#latestVersionStatus} of the relationships of both the old version and the new version + * of the item. + * + * This method will first locate all relationships that are eligible for an update, + * then it will try to match each of those relationships on the old version of given item + * with a relationship on the new version. + * + * One of the following scenarios will happen: + * - if a match is found, then the "latest" status on the side of given item is transferred from + * the old relationship to the new relationship. This implies that on the page of the third-party item, + * the old version of given item will NOT be shown anymore and the new version of given item will appear. + * Both versions of the given item still show the third-party item on their pages. + * - if a relationship only exists on the new version of given item, then this method does nothing. + * The status of those relationships should already have been set to "latest" on both sides during relationship + * creation. + * - if a relationship only exists on the old version of given item, then we assume that the relationship is no + * longer relevant to / has been removed from the new version of the item. The "latest" status is removed from + * the side of the given item. 
This implies that on the page of the third-party item, + * the relationship with given item will no longer be listed. The old version of given item still lists + * the third-party item and the new version doesn't. + * @param ctx the DSpace context. + * @param latestItem the new version of the item. + * @param previousItem the old version of the item. + */ + protected void updateRelationships(Context ctx, Item latestItem, Item previousItem) { + // check that the entity types of both items match + if (!doEntityTypesMatch(latestItem, previousItem)) { + return; + } + + // get the entity type (same for both items) + EntityType entityType = getEntityType(ctx, latestItem); + if (entityType == null) { + return; + } + + // get all relationship types that are linked to the given entity type + List relationshipTypes = getRelationshipTypes(ctx, entityType); + if (CollectionUtils.isEmpty(relationshipTypes)) { + return; + } + + for (RelationshipType relationshipType : relationshipTypes) { + List latestItemRelationships = getAllRelationships(ctx, latestItem, relationshipType); + if (latestItemRelationships == null) { + continue; + } + + List previousItemRelationships = getAllRelationships(ctx, previousItem, relationshipType); + if (previousItemRelationships == null) { + continue; + } + + // NOTE: no need to loop through latestItemRelationships, because if no match can be found + // (meaning a relationship is only present on the new version of the item), then it's + // a newly added relationship and its status should have been set to BOTH during creation. 
+ for (Relationship previousItemRelationship : previousItemRelationships) { + // determine on which side of the relationship the latest and previous item should be + boolean isLeft = previousItem.equals(previousItemRelationship.getLeftItem()); + boolean isRight = previousItem.equals(previousItemRelationship.getRightItem()); + if (isLeft == isRight) { + Item leftItem = previousItemRelationship.getLeftItem(); + Item rightItem = previousItemRelationship.getRightItem(); + String msg = String.format( + "Illegal state: could not determine side of item with uuid %s, handle %s in " + + "relationship with id %s, rightward name %s between " + + "left item with uuid %s, handle %s and right item with uuid %s, handle %s", + previousItem.getID(), previousItem.getHandle(), previousItemRelationship.getID(), + previousItemRelationship.getRelationshipType().getRightwardType(), + leftItem.getID(), leftItem.getHandle(), rightItem.getID(), rightItem.getHandle() + ); + log.error(msg); + throw new IllegalStateException(msg); + } + + // get the matching relationship on the latest item + Relationship latestItemRelationship = + getMatchingRelationship(latestItem, isLeft, previousItemRelationship, latestItemRelationships); + + // the other side of the relationship should be "latest", otherwise the relationship could not have been + // copied to the new item in the first place (by DefaultVersionProvider#copyRelationships) + if (relationshipVersioningUtils.otherSideIsLatest( + isLeft, previousItemRelationship.getLatestVersionStatus() + )) { + // Set the previous version of the item to non-latest. This implies that the previous version + // of the item will not be shown anymore on the page of the third-party item. That makes sense, + // because either the relationship has been deleted from the new version of the item (no match), + // or the matching relationship (linked to new version) will receive "latest" status in + // the next step. 
+ LatestVersionStatusChangelog changelog = + relationshipVersioningUtils.updateLatestVersionStatus(previousItemRelationship, isLeft, false); + reindexRelationship(ctx, changelog, previousItemRelationship); + } + + if (latestItemRelationship != null) { + // Set the new version of the item to latest if the relevant relationship exists (match found). + // This implies that the new version of the item will appear on the page of the third-party item. + // The old version of the item will not appear anymore on the page of the third-party item, + // see previous step. + LatestVersionStatusChangelog changelog = + relationshipVersioningUtils.updateLatestVersionStatus(latestItemRelationship, isLeft, true); + reindexRelationship(ctx, changelog, latestItemRelationship); } } } } + /** + * If the {@link Relationship#latestVersionStatus} of the relationship has changed, + * an "item modified" event should be fired for both the left and right item of the relationship. + * On one item the relation.* fields will change. On the other item the relation.*.latestForDiscovery will change. + * The event will cause the items to be re-indexed by the {@link IndexEventConsumer}. + * @param ctx the DSpace context. + * @param changelog indicates which side of the relationship has changed. + * @param relationship the relationship. 
+ */ + protected void reindexRelationship( + Context ctx, LatestVersionStatusChangelog changelog, Relationship relationship + ) { + if (changelog == NO_CHANGES) { + return; + } + + // on one item, relation.* fields will change + // on the other item, relation.*.latestForDiscovery will change + + // reindex left item + Item leftItem = relationship.getLeftItem(); + itemsToProcess.add(leftItem); + ctx.addEvent(new Event( + Event.MODIFY, leftItem.getType(), leftItem.getID(), null, itemService.getIdentifiers(ctx, leftItem) + )); + + // reindex right item + Item rightItem = relationship.getRightItem(); + itemsToProcess.add(rightItem); + ctx.addEvent(new Event( + Event.MODIFY, rightItem.getType(), rightItem.getID(), null, itemService.getIdentifiers(ctx, rightItem) + )); + } + + /** + * Given two items, check if their entity types match. + * If one or both items don't have an entity type, comparing is pointless and this method will return false. + * @param latestItem the item that represents the most recent version. + * @param previousItem the item that represents the second-most recent version. + * @return true if the entity types of both items are non-null and equal, false otherwise. 
+ */ + protected boolean doEntityTypesMatch(Item latestItem, Item previousItem) { + String latestItemEntityType = itemService.getEntityTypeLabel(latestItem); + String previousItemEntityType = itemService.getEntityTypeLabel(previousItem); + + // check if both items have an entity type + if (latestItemEntityType == null || previousItemEntityType == null) { + if (previousItemEntityType != null) { + log.warn( + "Inconsistency: Item with uuid {}, handle {} has NO entity type, " + + "but the previous version of that item with uuid {}, handle {} has entity type {}", + latestItem.getID(), latestItem.getHandle(), + previousItem.getID(), previousItem.getHandle(), previousItemEntityType + ); + } + + // one or both items do not have an entity type, so comparing is pointless + return false; + } + + // check if the entity types are equal + if (!StringUtils.equals(latestItemEntityType, previousItemEntityType)) { + log.warn( + "Inconsistency: Item with uuid {}, handle {} has entity type {}, " + + "but the previous version of that item with uuid {}, handle {} has entity type {}", + latestItem.getID(), latestItem.getHandle(), latestItemEntityType, + previousItem.getID(), previousItem.getHandle(), previousItemEntityType + ); + return false; + } + + // success - the entity types of both items are non-null and equal + log.info( + "Item with uuid {}, handle {} and the previous version of that item with uuid {}, handle {} " + + "have the same entity type: {}", + latestItem.getID(), latestItem.getHandle(), previousItem.getID(), previousItem.getHandle(), + latestItemEntityType + ); + return true; + } + + /** + * Get the entity type (stored in metadata field dspace.entity.type) of any item. + * @param item the item. + * @return the entity type. 
+ */ + protected EntityType getEntityType(Context ctx, Item item) { + try { + return itemService.getEntityType(ctx, item); + } catch (SQLException e) { + log.error( + "Exception occurred when trying to obtain entity type with label {} of item with uuid {}, handle {}", + itemService.getEntityTypeLabel(item), item.getID(), item.getHandle(), e + ); + return null; + } + } + + /** + * Get all relationship types that have the given entity type on their left and/or right side. + * @param ctx the DSpace context. + * @param entityType the entity type for which all relationship types should be found. + * @return a list of relationship types (possibly empty), or null in case of error. + */ + protected List getRelationshipTypes(Context ctx, EntityType entityType) { + try { + return relationshipTypeService.findByEntityType(ctx, entityType); + } catch (SQLException e) { + log.error( + "Exception occurred when trying to obtain relationship types via entity type with id {}, label {}", + entityType.getID(), entityType.getLabel(), e + ); + return null; + } + } + + /** + * Get all relationships of the given type linked to the given item. + * @param ctx the DSpace context. + * @param item the item. + * @param relationshipType the relationship type. + * @return a list of relationships (possibly empty), or null in case of error. + */ + protected List getAllRelationships(Context ctx, Item item, RelationshipType relationshipType) { + try { + return relationshipService.findByItemAndRelationshipType(ctx, item, relationshipType, -1, -1, false); + } catch (SQLException e) { + log.error( + "Exception occurred when trying to obtain relationships of type with id {}, rightward name {} " + + "for item with uuid {}, handle {}", + relationshipType.getID(), relationshipType.getRightwardType(), item.getID(), item.getHandle(), e + ); + return null; + } + } + + /** + * From a list of relationships, find the relationship with the correct relationship type and items. 
+ * If isLeft is true, the provided item should be on the left side of the relationship. + * If isLeft is false, the provided item should be on the right side of the relationship. + * In both cases, the other item is taken from the given relationship. + * @param latestItem the item that should either be on the left or right side of the returned relationship (if any). + * @param isLeft decide on which side of the relationship the provided item should be. + * @param previousItemRelationship the relationship from which the type and the other item are read. + * @param relationships the list of relationships that we'll search through. + * @return the relationship that satisfies the requirements (can only be one or zero). + */ + protected Relationship getMatchingRelationship( + Item latestItem, boolean isLeft, Relationship previousItemRelationship, List relationships + ) { + Item leftItem = previousItemRelationship.getLeftItem(); + RelationshipType relationshipType = previousItemRelationship.getRelationshipType(); + Item rightItem = previousItemRelationship.getRightItem(); + + if (isLeft) { + return getMatchingRelationship(latestItem, relationshipType, rightItem, relationships); + } else { + return getMatchingRelationship(leftItem, relationshipType, latestItem, relationships); + } + } + + + /** + * Find the relationship with the given left item, relation type and right item, from a list of relationships. + * @param expectedLeftItem the relationship that we're looking for has this item on the left side. + * @param expectedRelationshipType the relationship that we're looking for has this relationship type. + * @param expectedRightItem the relationship that we're looking for has this item on the right side. + * @param relationships the list of relationships that we'll search through. + * @return the relationship that satisfies the requirements (can only be one or zero). 
+ */ + protected Relationship getMatchingRelationship( + Item expectedLeftItem, RelationshipType expectedRelationshipType, Item expectedRightItem, + List relationships + ) { + Integer expectedRelationshipTypeId = expectedRelationshipType.getID(); + + List matchingRelationships = relationships.stream() + .filter(relationship -> { + int relationshipTypeId = relationship.getRelationshipType().getID(); + + boolean leftItemMatches = expectedLeftItem.equals(relationship.getLeftItem()); + boolean relationshipTypeMatches = expectedRelationshipTypeId == relationshipTypeId; + boolean rightItemMatches = expectedRightItem.equals(relationship.getRightItem()); + + return leftItemMatches && relationshipTypeMatches && rightItemMatches; + }) + .distinct() + .collect(Collectors.toUnmodifiableList()); + + if (matchingRelationships.isEmpty()) { + return null; + } + + // NOTE: this situation should never occur because the relationship table has a unique constraint + // over the "left_id", "type_id" and "right_id" columns + if (matchingRelationships.size() > 1) { + String msg = String.format( + "Illegal state: expected 0 or 1 relationship, but found %s relationships (ids: %s) " + + "of type with id %s, rightward name %s " + + "between left item with uuid %s, handle %s and right item with uuid %s, handle %s", + matchingRelationships.size(), + matchingRelationships.stream().map(Relationship::getID).collect(Collectors.toUnmodifiableList()), + expectedRelationshipTypeId, expectedRelationshipType.getRightwardType(), + expectedLeftItem.getID(), expectedLeftItem.getHandle(), + expectedRightItem.getID(), expectedRightItem.getHandle() + ); + log.error(msg); + throw new IllegalStateException(msg); + } + + return matchingRelationships.get(0); + } + @Override public void end(Context ctx) throws Exception { if (itemsToProcess != null) { diff --git a/dspace-api/src/main/java/org/dspace/versioning/factory/VersionServiceFactory.java 
b/dspace-api/src/main/java/org/dspace/versioning/factory/VersionServiceFactory.java index ecc3315a72..8e8cc786ca 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/factory/VersionServiceFactory.java +++ b/dspace-api/src/main/java/org/dspace/versioning/factory/VersionServiceFactory.java @@ -10,6 +10,7 @@ package org.dspace.versioning.factory; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.dspace.versioning.utils.RelationshipVersioningUtils; /** * Abstract factory to get services for the versioning package, use VersionServiceFactory.getInstance() to retrieve @@ -23,6 +24,8 @@ public abstract class VersionServiceFactory { public abstract VersioningService getVersionService(); + public abstract RelationshipVersioningUtils getRelationshipVersioningUtils(); + public static VersionServiceFactory getInstance() { return DSpaceServicesFactory.getInstance().getServiceManager() .getServiceByName("versionServiceFactory", VersionServiceFactory.class); diff --git a/dspace-api/src/main/java/org/dspace/versioning/factory/VersionServiceFactoryImpl.java b/dspace-api/src/main/java/org/dspace/versioning/factory/VersionServiceFactoryImpl.java index 613cb4faf4..97e4083426 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/factory/VersionServiceFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/versioning/factory/VersionServiceFactoryImpl.java @@ -9,6 +9,7 @@ package org.dspace.versioning.factory; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.dspace.versioning.utils.RelationshipVersioningUtils; import org.springframework.beans.factory.annotation.Autowired; /** @@ -25,6 +26,9 @@ public class VersionServiceFactoryImpl extends VersionServiceFactory { @Autowired(required = true) protected VersioningService versionService; + @Autowired(required = 
true) + protected RelationshipVersioningUtils relationshipVersioningUtils; + @Override public VersionHistoryService getVersionHistoryService() { return versionHistoryService; @@ -34,4 +38,10 @@ public class VersionServiceFactoryImpl extends VersionServiceFactory { public VersioningService getVersionService() { return versionService; } + + @Override + public RelationshipVersioningUtils getRelationshipVersioningUtils() { + return relationshipVersioningUtils; + } + } diff --git a/dspace-api/src/main/java/org/dspace/versioning/utils/RelationshipVersioningUtils.java b/dspace-api/src/main/java/org/dspace/versioning/utils/RelationshipVersioningUtils.java new file mode 100644 index 0000000000..5e40176082 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/versioning/utils/RelationshipVersioningUtils.java @@ -0,0 +1,114 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.versioning.utils; + +import static org.dspace.versioning.utils.RelationshipVersioningUtils.LatestVersionStatusChangelog.LEFT_SIDE_CHANGED; +import static org.dspace.versioning.utils.RelationshipVersioningUtils.LatestVersionStatusChangelog.NO_CHANGES; +import static org.dspace.versioning.utils.RelationshipVersioningUtils.LatestVersionStatusChangelog.RIGHT_SIDE_CHANGED; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Relationship; +import org.dspace.content.Relationship.LatestVersionStatus; + +/** + * Class with utility methods to manipulate relationships that are linked to versioned items. + * Specifically focussed on the "latest version status" of relationships, + * which controls which related items are relevant (visible) to any given item. 
+ */ +public class RelationshipVersioningUtils { + + private static final Logger log = LogManager.getLogger(RelationshipVersioningUtils.class); + + /** + * Given a latest version status, check if the other side is "latest". + * If we look from the left, this implies BOTH and RIGHT_ONLY return true. + * If we look from the right, this implies BOTH and LEFT_ONLY return true. + * @param isLeft whether we should look from the left or right side. + * @param latestVersionStatus the latest version status. + * @return true if the other side has "latest" status, false otherwise. + */ + public boolean otherSideIsLatest(boolean isLeft, LatestVersionStatus latestVersionStatus) { + if (latestVersionStatus == LatestVersionStatus.BOTH) { + return true; + } + + return latestVersionStatus == (isLeft ? LatestVersionStatus.RIGHT_ONLY : LatestVersionStatus.LEFT_ONLY); + } + + public enum LatestVersionStatusChangelog { + NO_CHANGES, + LEFT_SIDE_CHANGED, + RIGHT_SIDE_CHANGED + } + + /** + * Update {@link Relationship#latestVersionStatus} of the given relationship. + * If isLatest = true, this method will never throw IllegalStateException. + * If isLatest = false, you should make sure that the selected side of given relationship + * currently has "latest" status, otherwise IllegalStateException will be thrown. + * @param relationship the relationship. + * @param updateLeftSide whether the status of the left item or the right item should be updated. + * @param isLatest to what the status should be set. + * @throws IllegalStateException if the operation would result in both the left side and the right side + * being set to non-latest. 
+ */ + public LatestVersionStatusChangelog updateLatestVersionStatus( + Relationship relationship, boolean updateLeftSide, boolean isLatest + ) throws IllegalStateException { + LatestVersionStatus lvs = relationship.getLatestVersionStatus(); + + boolean leftSideIsLatest = lvs == LatestVersionStatus.BOTH || lvs == LatestVersionStatus.LEFT_ONLY; + boolean rightSideIsLatest = lvs == LatestVersionStatus.BOTH || lvs == LatestVersionStatus.RIGHT_ONLY; + + if (updateLeftSide) { + if (leftSideIsLatest == isLatest) { + return NO_CHANGES; // no change needed + } + leftSideIsLatest = isLatest; + } else { + if (rightSideIsLatest == isLatest) { + return NO_CHANGES; // no change needed + } + rightSideIsLatest = isLatest; + } + + LatestVersionStatus newVersionStatus; + if (leftSideIsLatest && rightSideIsLatest) { + newVersionStatus = LatestVersionStatus.BOTH; + } else if (leftSideIsLatest) { + newVersionStatus = LatestVersionStatus.LEFT_ONLY; + } else if (rightSideIsLatest) { + newVersionStatus = LatestVersionStatus.RIGHT_ONLY; + } else { + String msg = String.format( + "Illegal state: cannot set %s item to latest = false, because relationship with id %s, " + + "rightward name %s between left item with uuid %s, handle %s and right item with uuid %s, handle %s " + + "has latest version status set to %s", + updateLeftSide ? 
"left" : "right", relationship.getID(), + relationship.getRelationshipType().getRightwardType(), + relationship.getLeftItem().getID(), relationship.getLeftItem().getHandle(), + relationship.getRightItem().getID(), relationship.getRightItem().getHandle(), lvs + ); + log.error(msg); + throw new IllegalStateException(msg); + } + + log.info( + "set latest version status from {} to {} for relationship with id {}, rightward name {} " + + "between left item with uuid {}, handle {} and right item with uuid {}, handle {}", + lvs, newVersionStatus, relationship.getID(), relationship.getRelationshipType().getRightwardType(), + relationship.getLeftItem().getID(), relationship.getLeftItem().getHandle(), + relationship.getRightItem().getID(), relationship.getRightItem().getHandle() + ); + relationship.setLatestVersionStatus(newVersionStatus); + + return updateLeftSide ? LEFT_SIDE_CHANGED : RIGHT_SIDE_CHANGED; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/vocabulary/ControlledVocabulary.java b/dspace-api/src/main/java/org/dspace/vocabulary/ControlledVocabulary.java index ee1b0445bb..7f2bdc6ef7 100644 --- a/dspace-api/src/main/java/org/dspace/vocabulary/ControlledVocabulary.java +++ b/dspace-api/src/main/java/org/dspace/vocabulary/ControlledVocabulary.java @@ -15,8 +15,11 @@ import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; -import org.apache.xpath.XPathAPI; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; import org.w3c.dom.Document; @@ -56,7 +59,7 @@ public class ControlledVocabulary { * TODO: add some caching ! 
*/ public static ControlledVocabulary loadVocabulary(String fileName) - throws IOException, SAXException, ParserConfigurationException, TransformerException { + throws IOException, SAXException, ParserConfigurationException, XPathExpressionException { StringBuilder filePath = new StringBuilder(); ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); @@ -70,7 +73,9 @@ public class ControlledVocabulary { if (controlledVocFile.exists()) { DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder(); Document document = builder.parse(controlledVocFile); - return loadVocabularyNode(XPathAPI.selectSingleNode(document, "node"), ""); + XPath xPath = XPathFactory.newInstance().newXPath(); + Node node = (Node) xPath.compile("node").evaluate(document, XPathConstants.NODE); + return loadVocabularyNode(node, ""); } else { return null; } @@ -85,7 +90,8 @@ public class ControlledVocabulary { * @return a vocabulary node with all its children * @throws TransformerException should something go wrong with loading the xml */ - private static ControlledVocabulary loadVocabularyNode(Node node, String initialValue) throws TransformerException { + private static ControlledVocabulary loadVocabularyNode(Node node, String initialValue) + throws XPathExpressionException { Node idNode = node.getAttributes().getNamedItem("id"); String id = null; if (idNode != null) { @@ -102,7 +108,9 @@ public class ControlledVocabulary { } else { value = label; } - NodeList subNodes = XPathAPI.selectNodeList(node, "isComposedBy/node"); + XPath xPath = XPathFactory.newInstance().newXPath(); + NodeList subNodes = (NodeList) xPath.compile("isComposedBy/node").evaluate(node, + XPathConstants.NODESET); List subVocabularies = new ArrayList<>(subNodes.getLength()); for (int i = 0; i < subNodes.getLength(); i++) { diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/WorkflowRequirementsServiceImpl.java 
b/dspace-api/src/main/java/org/dspace/xmlworkflow/WorkflowRequirementsServiceImpl.java index c651097fcb..aecdccd55a 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/WorkflowRequirementsServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/WorkflowRequirementsServiceImpl.java @@ -100,7 +100,7 @@ public class WorkflowRequirementsServiceImpl implements WorkflowRequirementsServ //Then remove the current user from the inProgressUsers inProgressUserService.delete(context, inProgressUserService.findByWorkflowItemAndEPerson(context, wfi, user)); - //Make sure the removed user has his custom rights removed + //Make sure the removed user has their custom rights removed xmlWorkflowService.removeUserItemPolicies(context, wfi.getItem(), user); Workflow workflow = workflowFactory.getWorkflow(wfi.getCollection()); diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java index fbe06245ab..90f180ec87 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java @@ -447,7 +447,7 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService { enteredNewStep); } } else if (enteredNewStep) { - // If the user finished his/her step, we keep processing until there is a UI step action or no + // If the user finished their step, we keep processing until there is a UI step action or no // step at all nextStep = workflow.getNextStep(c, wfi, currentStep, currentOutcome.getResult()); c.turnOffAuthorisationSystem(); @@ -938,7 +938,7 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService { authorizeService.removeEPersonPolicies(context, bitstream, e); } } - // Ensure that the submitter always retains his resource policies + // Ensure that the submitter always retains their resource policies if 
(e.getID().equals(item.getSubmitter().getID())) { grantSubmitterReadPolicies(context, item); } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Workflow.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Workflow.java index 636007344c..fd081b3a1b 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Workflow.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Workflow.java @@ -8,7 +8,7 @@ package org.dspace.xmlworkflow.state; import java.sql.SQLException; -import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -118,7 +118,7 @@ public class Workflow implements BeanNameAware { * @return a map containing the roles, the role name will the key, the role itself the value */ public Map getRoles() { - Map roles = new HashMap<>(); + Map roles = new LinkedHashMap<>(); for (Step step : steps) { if (step.getRole() != null) { roles.put(step.getRole().getId(), step.getRole()); diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SingleUserReviewAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SingleUserReviewAction.java index 9ef554821d..27cf98f77f 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SingleUserReviewAction.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SingleUserReviewAction.java @@ -25,7 +25,7 @@ import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; /** * Processing class of an action where a single user has - * been assigned and he can either accept/reject the workflow item + * been assigned and they can either accept/reject the workflow item * or reject the task * * @author Bram De Schouwer (bram.deschouwer at dot com) @@ -90,7 +90,7 @@ public class SingleUserReviewAction extends ProcessingAction { } else { request.setAttribute("page", REJECT_PAGE); } - // We have pressed reject 
item, so take the user to a page where he can reject + // We have pressed reject item, so take the user to a page where they can reject return new ActionResult(ActionResult.TYPE.TYPE_PAGE); } else if (request.getParameter(SUBMIT_DECLINE_TASK) != null) { return new ActionResult(ActionResult.TYPE.TYPE_OUTCOME, OUTCOME_REJECT); diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/AssignOriginalSubmitterAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/AssignOriginalSubmitterAction.java index 5d934ba189..0cd82fe770 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/AssignOriginalSubmitterAction.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/AssignOriginalSubmitterAction.java @@ -136,7 +136,7 @@ public class AssignOriginalSubmitterAction extends UserSelectionAction { protected void createTaskForEPerson(Context c, XmlWorkflowItem wfi, Step step, WorkflowActionConfig actionConfig, EPerson user) throws SQLException, AuthorizeException, IOException { if (claimedTaskService.find(c, wfi, step.getId(), actionConfig.getId()) != null) { - workflowRequirementsService.addClaimedUser(c, wfi, step, c.getCurrentUser()); + workflowRequirementsService.addClaimedUser(c, wfi, step, user); XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService() .createOwnedTask(c, wfi, step, actionConfig, user); } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/PoolTaskServiceImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/PoolTaskServiceImpl.java index f64f1b3942..fb673725e1 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/PoolTaskServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/PoolTaskServiceImpl.java @@ -92,7 +92,7 @@ public class PoolTaskServiceImpl implements PoolTaskService { return poolTask; } else { //If the 
user has a is processing or has finished the step for a workflowitem, there is no need to look - // for pooltasks for one of his + // for pooltasks for one of their //groups because the user already has the task claimed if (inProgressUserService.findByWorkflowItemAndEPerson(context, workflowItem, ePerson) != null) { return null; diff --git a/dspace-api/src/main/resources/Messages.properties b/dspace-api/src/main/resources/Messages.properties index 0583fb493c..c478e4e69b 100644 --- a/dspace-api/src/main/resources/Messages.properties +++ b/dspace-api/src/main/resources/Messages.properties @@ -119,3 +119,5 @@ org.dspace.app.rest.exception.RESTEmptyWorkflowGroupException.message = Refused workflow group {1}. Delete the tasks and group first if you want to remove this user. org.dspace.app.rest.exception.EPersonNameNotProvidedException.message = The eperson.firstname and eperson.lastname values need to be filled in org.dspace.app.rest.exception.GroupNameNotProvidedException.message = Cannot create group, no group name is provided +org.dspace.app.rest.exception.GroupHasPendingWorkflowTasksException.message = Cannot delete group, the associated workflow role still has pending tasks +org.dspace.app.rest.exception.PasswordNotValidException.message = New password is invalid. Valid passwords must be at least 8 characters long! 
diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V1.4__Upgrade_to_DSpace_1.4_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V1.4__Upgrade_to_DSpace_1.4_schema.sql index e00a651626..62d12fe5ce 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V1.4__Upgrade_to_DSpace_1.4_schema.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V1.4__Upgrade_to_DSpace_1.4_schema.sql @@ -245,13 +245,13 @@ insert into most_recent_checksum ) select bitstream.bitstream_id, - '1', + true, CASE WHEN bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END, CASE WHEN bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END, FORMATDATETIME(NOW(),'DD-MM-RRRR HH24:MI:SS'), FORMATDATETIME(NOW(),'DD-MM-RRRR HH24:MI:SS'), CASE WHEN bitstream.checksum_algorithm IS NULL THEN 'MD5' ELSE bitstream.checksum_algorithm END, - '1' + true from bitstream; -- Update all the deleted checksums @@ -263,7 +263,7 @@ update most_recent_checksum set to_be_processed = 0 where most_recent_checksum.bitstream_id in ( select bitstream_id -from bitstream where deleted = '1' ); +from bitstream where deleted = true ); -- this will insert into history table -- for the initial start diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql index 87551bdf4e..cd908279f1 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql @@ -36,7 +36,7 @@ alter table metadatavalue alter column resource_id set not null; INSERT INTO metadatavalue (metadata_value_id, resource_id, 
resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, community_id AS resource_id, 4 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, @@ -47,7 +47,7 @@ FROM community where not introductory_text is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, community_id AS resource_id, 4 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'abstract') AS metadata_field_id, @@ -58,7 +58,7 @@ FROM community where not short_description is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, community_id AS resource_id, 4 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id, @@ -69,7 +69,7 @@ FROM community where not side_bar_text is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, community_id AS resource_id, 4 AS 
resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id, @@ -80,7 +80,7 @@ FROM community where not copyright_text is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, community_id AS resource_id, 4 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, @@ -104,7 +104,7 @@ alter table community drop column name; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, collection_id AS resource_id, 3 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, @@ -115,7 +115,7 @@ FROM collection where not introductory_text is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, collection_id AS resource_id, 3 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'abstract') AS metadata_field_id, @@ 
-126,7 +126,7 @@ FROM collection where not short_description is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, collection_id AS resource_id, 3 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id, @@ -137,7 +137,7 @@ FROM collection where not side_bar_text is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, collection_id AS resource_id, 3 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id, @@ -148,7 +148,7 @@ FROM collection where not copyright_text is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, collection_id AS resource_id, 3 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, @@ -159,7 +159,7 @@ FROM collection where not name is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as 
metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, collection_id AS resource_id, 3 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'provenance' and qualifier is null) AS metadata_field_id, @@ -170,7 +170,7 @@ FROM collection where not provenance_description is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, collection_id AS resource_id, 3 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier = 'license') AS metadata_field_id, @@ -194,7 +194,7 @@ alter table collection drop column provenance_description; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, bundle_id AS resource_id, 1 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, @@ -214,7 +214,7 @@ alter table bundle drop column name; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, bitstream_id AS resource_id, 0 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from 
metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, @@ -225,7 +225,7 @@ FROM bitstream where not name is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, bitstream_id AS resource_id, 0 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, @@ -236,7 +236,7 @@ FROM bitstream where not description is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, bitstream_id AS resource_id, 0 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'format' and qualifier is null) AS metadata_field_id, @@ -247,7 +247,7 @@ FROM bitstream where not user_format_description is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, bitstream_id AS resource_id, 0 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'source' and qualifier is null) AS metadata_field_id, @@ -269,7 +269,7 @@ alter table bitstream drop column source; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, 
metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, eperson_group_id AS resource_id, 6 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, @@ -288,7 +288,7 @@ alter table epersongroup drop column name; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, eperson_id AS resource_id, 7 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'firstname' and qualifier is null) AS metadata_field_id, @@ -299,7 +299,7 @@ FROM eperson where not firstname is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, eperson_id AS resource_id, 7 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'lastname' and qualifier is null) AS metadata_field_id, @@ -310,7 +310,7 @@ FROM eperson where not lastname is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, eperson_id AS resource_id, 7 AS resource_type_id, (select metadata_field_id from metadatafieldregistry 
where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'phone' and qualifier is null) AS metadata_field_id, @@ -321,7 +321,7 @@ FROM eperson where not phone is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, eperson_id AS resource_id, 7 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'language' and qualifier is null) AS metadata_field_id, diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V5.6_2016.08.23__DS-3097.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V5.6_2016.08.23__DS-3097.sql index 2e09b807de..0bd68c5201 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V5.6_2016.08.23__DS-3097.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V5.6_2016.08.23__DS-3097.sql @@ -14,11 +14,11 @@ UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_i SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id LEFT JOIN item ON item2bundle.item_id = item.item_id - WHERE item.withdrawn = 1 + WHERE item.withdrawn = true ); UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 1 and resource_id in ( SELECT item2bundle.bundle_id FROM item2bundle LEFT JOIN item ON item2bundle.item_id = item.item_id - WHERE item.withdrawn = 1 + WHERE item.withdrawn = true ); \ No newline at end of file diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql index 1c98ceef2a..1ee23246ea 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql @@ -17,7 +17,7 @@ INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, start_date, end_date, rpname, rptype, rpdescription, eperson_id, epersongroup_id, dspace_object) SELECT -resourcepolicy_seq.nextval AS policy_id, +NEXT VALUE FOR resourcepolicy_seq AS policy_id, resource_type_id, resource_id, -- Insert the Constants.DELETE action diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V6.0_2016.08.23__DS-3097.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V6.0_2016.08.23__DS-3097.sql index e1220c8c7c..5bb59970c5 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V6.0_2016.08.23__DS-3097.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V6.0_2016.08.23__DS-3097.sql @@ -14,11 +14,11 @@ UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object i SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id LEFT JOIN item ON item2bundle.item_id = item.uuid - WHERE item.withdrawn = 1 + WHERE item.withdrawn = true ); UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object in ( SELECT item2bundle.bundle_id FROM item2bundle LEFT JOIN item ON item2bundle.item_id = item.uuid - WHERE item.withdrawn = 1 + WHERE item.withdrawn = 
true ); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql index 3b649a321c..7506433cdd 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql @@ -9,10 +9,11 @@ ---------------------------------------------------- -- Make sure the metadatavalue.place column starts at 0 instead of 1 ---------------------------------------------------- + CREATE LOCAL TEMPORARY TABLE mdv_minplace ( dspace_object_id UUID NOT NULL, metadata_field_id INT NOT NULL, - minplace INT NOT NULL, + minplace INT NOT NULL ); INSERT INTO mdv_minplace diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.04.29__orcid_queue_and_history.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.04.29__orcid_queue_and_history.sql new file mode 100644 index 0000000000..e76926480a --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.04.29__orcid_queue_and_history.sql @@ -0,0 +1,54 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Create tables for ORCID Queue and History +----------------------------------------------------------------------------------- + +CREATE SEQUENCE orcid_queue_id_seq; + +CREATE TABLE orcid_queue +( + id INTEGER NOT 
NULL, + owner_id UUID NOT NULL, + entity_id UUID, + put_code VARCHAR(255), + record_type VARCHAR(255), + description VARCHAR(255), + operation VARCHAR(255), + metadata CLOB, + attempts INTEGER, + CONSTRAINT orcid_queue_pkey PRIMARY KEY (id), + CONSTRAINT orcid_queue_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES item (uuid), + CONSTRAINT orcid_queue_entity_id_fkey FOREIGN KEY (entity_id) REFERENCES item (uuid) +); + +CREATE INDEX orcid_queue_owner_id_index on orcid_queue(owner_id); + + +CREATE SEQUENCE orcid_history_id_seq; + +CREATE TABLE orcid_history +( + id INTEGER NOT NULL, + owner_id UUID NOT NULL, + entity_id UUID, + put_code VARCHAR(255), + timestamp_last_attempt TIMESTAMP, + response_message CLOB, + status INTEGER, + metadata CLOB, + operation VARCHAR(255), + record_type VARCHAR(255), + description VARCHAR(255), + CONSTRAINT orcid_history_pkey PRIMARY KEY (id), + CONSTRAINT orcid_history_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES item (uuid), + CONSTRAINT orcid_history_entity_id_fkey FOREIGN KEY (entity_id) REFERENCES item (uuid) +); + +CREATE INDEX orcid_history_owner_id_index on orcid_history(owner_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.05.16__Orcid_token_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.05.16__Orcid_token_table.sql new file mode 100644 index 0000000000..8bda3a8acd --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.05.16__Orcid_token_table.sql @@ -0,0 +1,24 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Create table for ORCID access tokens 
+----------------------------------------------------------------------------------- + +CREATE SEQUENCE orcid_token_id_seq; + +CREATE TABLE orcid_token +( + id INTEGER NOT NULL, + eperson_id UUID NOT NULL UNIQUE, + profile_item_id UUID, + access_token VARCHAR(100) NOT NULL, + CONSTRAINT orcid_token_pkey PRIMARY KEY (id), + CONSTRAINT orcid_token_eperson_id_fkey FOREIGN KEY (eperson_id) REFERENCES eperson (uuid), + CONSTRAINT orcid_token_profile_item_id_fkey FOREIGN KEY (profile_item_id) REFERENCES item (uuid) +); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.06.16__process_to_group.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.06.16__process_to_group.sql new file mode 100644 index 0000000000..0e7d417ae5 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.06.16__process_to_group.sql @@ -0,0 +1,18 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +------------------------------------------------------------------------------- +-- Table to store Groups related to a Process on its creation +------------------------------------------------------------------------------- + +CREATE TABLE Process2Group +( + process_id INTEGER REFERENCES Process(process_id), + group_id UUID REFERENCES epersongroup (uuid) ON DELETE CASCADE, + CONSTRAINT PK_Process2Group PRIMARY KEY (process_id, group_id) +); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql new file mode 100644 index 
0000000000..7bf3948d3a --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql @@ -0,0 +1,10 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +-- NOTE: default 0 ensures that existing relations have "latest_version_status" set to "both" (first constant in enum, see Relationship class) +ALTER TABLE relationship ADD COLUMN IF NOT EXISTS latest_version_status INTEGER DEFAULT 0 NOT NULL; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.5_2022.12.06__index_action_resource_policy.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.5_2022.12.06__index_action_resource_policy.sql new file mode 100644 index 0000000000..696e84433d --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.5_2022.12.06__index_action_resource_policy.sql @@ -0,0 +1,9 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +CREATE INDEX resourcepolicy_action_idx ON resourcepolicy(action_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md index 229b70ec37..6cef123859 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md @@ -1,5 +1,10 @@ # Oracle Flyway Database Migrations (i.e. Upgrades) +--- +WARNING: Oracle Support is deprecated. 
+See https://github.com/DSpace/DSpace/issues/8214 +--- + The SQL scripts in this directory are Oracle-specific database migrations. They are used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/). As such, these scripts are automatically called by Flyway when the DSpace diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.04.29__orcid_queue_and_history.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.04.29__orcid_queue_and_history.sql new file mode 100644 index 0000000000..3fe424cf6c --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.04.29__orcid_queue_and_history.sql @@ -0,0 +1,54 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Create tables for ORCID Queue and History +----------------------------------------------------------------------------------- + +CREATE SEQUENCE orcid_queue_id_seq; + +CREATE TABLE orcid_queue +( + id INTEGER NOT NULL, + owner_id RAW(16) NOT NULL, + entity_id RAW(16), + put_code VARCHAR(255), + record_type VARCHAR(255), + description VARCHAR(255), + operation VARCHAR(255), + metadata CLOB, + attempts INTEGER, + CONSTRAINT orcid_queue_pkey PRIMARY KEY (id), + CONSTRAINT orcid_queue_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES item (uuid), + CONSTRAINT orcid_queue_entity_id_fkey FOREIGN KEY (entity_id) REFERENCES item (uuid) +); + +CREATE INDEX orcid_queue_owner_id_index on orcid_queue(owner_id); + + +CREATE SEQUENCE orcid_history_id_seq; + +CREATE TABLE orcid_history +( + id INTEGER NOT NULL, + owner_id RAW(16) NOT NULL, + entity_id RAW(16), + put_code VARCHAR(255), + timestamp_last_attempt TIMESTAMP, + 
response_message CLOB, + status INTEGER, + metadata CLOB, + operation VARCHAR(255), + record_type VARCHAR(255), + description VARCHAR(255), + CONSTRAINT orcid_history_pkey PRIMARY KEY (id), + CONSTRAINT orcid_history_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES item (uuid), + CONSTRAINT orcid_history_entity_id_fkey FOREIGN KEY (entity_id) REFERENCES item (uuid) +); + +CREATE INDEX orcid_history_owner_id_index on orcid_history(owner_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql new file mode 100644 index 0000000000..14bf853143 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql @@ -0,0 +1,24 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Create table for ORCID access tokens +----------------------------------------------------------------------------------- + +CREATE SEQUENCE orcid_token_id_seq; + +CREATE TABLE orcid_token +( + id INTEGER NOT NULL, + eperson_id RAW(16) NOT NULL UNIQUE, + profile_item_id RAW(16), + access_token VARCHAR2(100) NOT NULL, + CONSTRAINT orcid_token_pkey PRIMARY KEY (id), + CONSTRAINT orcid_token_eperson_id_fkey FOREIGN KEY (eperson_id) REFERENCES eperson (uuid), + CONSTRAINT orcid_token_profile_item_id_fkey FOREIGN KEY (profile_item_id) REFERENCES item (uuid) +); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql new file mode 100644 index 0000000000..0e7d417ae5 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql @@ -0,0 +1,18 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +------------------------------------------------------------------------------- +-- Table to store Groups related to a Process on its creation +------------------------------------------------------------------------------- + +CREATE TABLE Process2Group +( + process_id INTEGER REFERENCES Process(process_id), + group_id UUID REFERENCES epersongroup (uuid) ON DELETE CASCADE, + CONSTRAINT PK_Process2Group PRIMARY KEY (process_id, group_id) +); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql new file mode 100644 index 0000000000..3eb9ae6dd4 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql @@ -0,0 +1,10 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +-- NOTE: default 0 ensures that existing relations have "latest_version_status" set to "both" (first constant in enum, see Relationship class) +ALTER TABLE relationship ADD latest_version_status INTEGER DEFAULT 0 
NOT NULL; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.04.29__orcid_queue_and_history.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.04.29__orcid_queue_and_history.sql new file mode 100644 index 0000000000..3031602515 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.04.29__orcid_queue_and_history.sql @@ -0,0 +1,54 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Create tables for ORCID Queue and History +----------------------------------------------------------------------------------- + +CREATE SEQUENCE orcid_queue_id_seq; + +CREATE TABLE orcid_queue +( + id INTEGER NOT NULL, + owner_id uuid NOT NULL, + entity_id uuid, + attempts INTEGER, + put_code CHARACTER VARYING(255), + record_type CHARACTER VARYING(255), + description CHARACTER VARYING(255), + operation CHARACTER VARYING(255), + metadata TEXT, + CONSTRAINT orcid_queue_pkey PRIMARY KEY (id), + CONSTRAINT orcid_queue_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES item (uuid), + CONSTRAINT orcid_queue_entity_id_fkey FOREIGN KEY (entity_id) REFERENCES item (uuid) +); + +CREATE INDEX orcid_queue_owner_id_index on orcid_queue(owner_id); + + +CREATE SEQUENCE orcid_history_id_seq; + +CREATE TABLE orcid_history +( + id INTEGER NOT NULL, + owner_id uuid NOT NULL, + entity_id uuid, + put_code CHARACTER VARYING(255), + timestamp_last_attempt TIMESTAMP, + response_message text, + status INTEGER, + metadata TEXT, + operation CHARACTER VARYING(255), + record_type CHARACTER VARYING(255), + description CHARACTER VARYING(255), + CONSTRAINT orcid_history_pkey PRIMARY KEY (id), + CONSTRAINT 
orcid_history_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES item (uuid), + CONSTRAINT orcid_history_entity_id_fkey FOREIGN KEY (entity_id) REFERENCES item (uuid) +); + +CREATE INDEX orcid_history_owner_id_index on orcid_history(owner_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.05.16__Orcid_token_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.05.16__Orcid_token_table.sql new file mode 100644 index 0000000000..6c3793d422 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.05.16__Orcid_token_table.sql @@ -0,0 +1,24 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Create table for ORCID access tokens +----------------------------------------------------------------------------------- + +CREATE SEQUENCE orcid_token_id_seq; + +CREATE TABLE orcid_token +( + id INTEGER NOT NULL, + eperson_id uuid NOT NULL UNIQUE, + profile_item_id uuid, + access_token VARCHAR(100) NOT NULL, + CONSTRAINT orcid_token_pkey PRIMARY KEY (id), + CONSTRAINT orcid_token_eperson_id_fkey FOREIGN KEY (eperson_id) REFERENCES eperson (uuid), + CONSTRAINT orcid_token_profile_item_id_fkey FOREIGN KEY (profile_item_id) REFERENCES item (uuid) +); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.06.16__process_to_group.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.06.16__process_to_group.sql new file mode 100644 index 0000000000..0e7d417ae5 --- /dev/null +++ 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.06.16__process_to_group.sql @@ -0,0 +1,18 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +------------------------------------------------------------------------------- +-- Table to store Groups related to a Process on its creation +------------------------------------------------------------------------------- + +CREATE TABLE Process2Group +( + process_id INTEGER REFERENCES Process(process_id), + group_id UUID REFERENCES epersongroup (uuid) ON DELETE CASCADE, + CONSTRAINT PK_Process2Group PRIMARY KEY (process_id, group_id) +); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql new file mode 100644 index 0000000000..7bf3948d3a --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql @@ -0,0 +1,10 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +-- NOTE: default 0 ensures that existing relations have "latest_version_status" set to "both" (first constant in enum, see Relationship class) +ALTER TABLE relationship ADD COLUMN IF NOT EXISTS latest_version_status INTEGER DEFAULT 0 NOT NULL; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2022.12.06__index_action_resource_policy.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2022.12.06__index_action_resource_policy.sql new file mode 100644 index 0000000000..696e84433d --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2022.12.06__index_action_resource_policy.sql @@ -0,0 +1,9 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +CREATE INDEX resourcepolicy_action_idx ON resourcepolicy(action_id); diff --git a/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml index 5e69ee9c42..b8f02c46ff 100644 --- a/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml +++ b/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml @@ -34,6 +34,14 @@ + + + + + + @@ -43,11 +51,11 @@ class="org.dspace.importer.external.arxiv.metadatamapping.ArXivFieldMapping"> - - + + @@ -56,7 +64,6 @@ - @@ -115,10 +122,81 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + \ No newline at end of file diff --git a/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-configuration-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-configuration-services.xml deleted file mode 100644 index c8197970a9..0000000000 --- a/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-configuration-services.xml +++ /dev/null @@ -1,45 +0,0 @@ - - - - - - - - - - - - - dc.identifier.issn - - - - - - - - - diff --git a/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-services.xml index 6fe8ddb07b..76891d169c 
100644 --- a/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-services.xml +++ b/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-services.xml @@ -25,10 +25,38 @@ - + + + + + + + + dc.identifier.issn + + + + + + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml b/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml index a8165dd5d4..f40298db30 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml @@ -21,6 +21,7 @@ + @@ -82,6 +83,11 @@ submission-form + + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + submit.progressbar.accessCondition org.dspace.app.rest.submit.step.AccessConditionStep @@ -131,6 +137,12 @@ org.dspace.app.rest.submit.step.DescribeStep submission-form + + + submit.progressbar.sherpapolicy + org.dspace.app.rest.submit.step.SherpaPolicyStep + sherpaPolicy + @@ -166,6 +178,7 @@ + @@ -191,6 +204,10 @@ + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/local.cfg b/dspace-api/src/test/data/dspaceFolder/config/local.cfg index 3c19a68e9f..9cc6b7ebea 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/local.cfg +++ b/dspace-api/src/test/data/dspaceFolder/config/local.cfg @@ -70,6 +70,17 @@ mail.server.disabled = true # (Defaults to a dummy/fake prefix of 123456789) handle.prefix = 123456789 +# Whether to enable the DSpace handle resolver endpoints necessary for +# https://github.com/DSpace/Remote-Handle-Resolver +# Defaults to "false" which means these handle resolver endpoints are not available. +handle.remote-resolver.enabled = true + +# Whether to enable the DSpace listhandles resolver that lists all available +# handles for this DSpace installation. +# Defaults to "false" which means is possible to obtain the list of handles +# of this DSpace installation, whenever the `handle.remote-resolver.enabled = true`. 
+handle.hide.listhandles = false + ##################### # LOGLEVEL SETTINGS # ##################### @@ -84,7 +95,7 @@ loglevel.dspace = INFO # IIIF TEST SETTINGS # ######################## iiif.enabled = true -event.dispatcher.default.consumers = versioning, discovery, eperson, iiif +event.dispatcher.default.consumers = versioning, discovery, eperson, orcidqueue, iiif ########################################### # CUSTOM UNIT / INTEGRATION TEST SETTINGS # @@ -144,3 +155,19 @@ authentication-ip.Student = 6.6.6.6 useProxies = true proxies.trusted.ipranges = 7.7.7.7 proxies.trusted.include_ui_ip = true + +csvexport.dir = dspace-server-webapp/src/test/data/dspaceFolder/exports + +# For the tests we have to disable this health indicator because there isn't a mock server and the calculated status was DOWN +management.health.solrOai.enabled = false + +# Enable researcher profiles and orcid synchronization for tests +researcher-profile.entity-type = Person +orcid.synchronization-enabled = true + +# Configuration settings required for Researcher Profiles +# These settings ensure "dspace.object.owner" field are indexed by Authority Control +choices.plugin.dspace.object.owner = EPersonAuthority +choices.presentation.dspace.object.owner = suggest +authority.controlled.dspace.object.owner = true + diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/access-conditions.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/access-conditions.xml index e21a85cca4..450ed3ad0b 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/access-conditions.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/access-conditions.xml @@ -3,10 +3,9 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.5.xsd"> - + - - + @@ -18,7 +17,7 @@ - + @@ -31,7 +30,7 @@ - + @@ -43,13 +42,13 @@ - + diff --git 
a/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-openaire.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-openaire.xml index e10d04a16f..f1e6c30d13 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-openaire.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-openaire.xml @@ -1,6 +1,10 @@ - @@ -15,11 +19,71 @@ init-method="init"> + Project + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-services.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-services.xml index ac163d3581..37e1fb5089 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-services.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-services.xml @@ -6,6 +6,8 @@ + + @@ -13,13 +15,7 @@ - - - - - - - + Journal @@ -28,13 +24,7 @@ - - - - - - - + Journal @@ -43,13 +33,7 @@ - - - - - - - + OrgUnit @@ -58,10 +42,10 @@ - - - - + + + + @@ -71,26 +55,52 @@ - + - - - - - - - - - - - + + + + + - xml + Publication + + + + + + + Publication + none + + + + + + + + + + + Publication + + + + + + + + + + + Publication + none + + + - diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/iiif-processing.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/iiif-processing.xml new file mode 100644 index 0000000000..ea2c654608 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/iiif-processing.xml @@ -0,0 +1,12 @@ + + + + + + + + \ No newline at end of file diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml index 69524e4f14..140fe0abf5 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml +++ 
b/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml @@ -22,6 +22,11 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/sherpa.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/sherpa.xml index fb9e31b9a0..206326f3db 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/sherpa.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/sherpa.xml @@ -33,4 +33,18 @@ + + + + + + + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/solr-services.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/solr-services.xml index 5f86c73598..32ab90b2cc 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/solr-services.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/solr-services.xml @@ -47,5 +47,7 @@ + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/spring-dspace-addon-sherpa-services.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/spring-dspace-addon-sherpa-services.xml deleted file mode 100644 index adb2340f10..0000000000 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/spring-dspace-addon-sherpa-services.xml +++ /dev/null @@ -1,37 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - diff --git a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml index 67946788b5..7438fda852 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml @@ -140,6 +140,7 @@ ispartofseries true + Technical Report series Enter the series and number assigned to this item by your community. @@ -302,6 +303,75 @@ it, please enter the types and the actual numbers or codes. +

    + + + dc + title + + false + + onebox + Enter the main title of the item. + You must enter a main title for this item. + + + + + + dc + date + issued + false + + + date + Please give the date of previous publication or public distribution. + You can leave out the day and/or month if they aren't + applicable. + You must enter at least the year. + + + + + dc + type + + true + + dropdown + Select the type(s) of content of the item. To select more than one value in the list, you may have to hold down the "CTRL" or "Shift" key. + + + + + + + dc + identifier + isbn + true + + Book + onebox + Enter the ISBN of the book. + An ISBN is required. + + + + dc + identifier + isbn + true + + Book chapter + onebox + Enter the ISBN of the book in which this chapter appears. + + + +
    +
    diff --git a/dspace-api/src/test/java/org/dspace/AbstractDSpaceIntegrationTest.java b/dspace-api/src/test/java/org/dspace/AbstractDSpaceIntegrationTest.java index 1abc4e017d..5a5ce8bf6d 100644 --- a/dspace-api/src/test/java/org/dspace/AbstractDSpaceIntegrationTest.java +++ b/dspace-api/src/test/java/org/dspace/AbstractDSpaceIntegrationTest.java @@ -18,6 +18,7 @@ import java.util.TimeZone; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.builder.AbstractBuilder; +import org.dspace.discovery.SearchUtils; import org.dspace.servicemanager.DSpaceKernelImpl; import org.dspace.servicemanager.DSpaceKernelInit; import org.junit.AfterClass; @@ -104,6 +105,7 @@ public class AbstractDSpaceIntegrationTest { // Unload DSpace services AbstractBuilder.destroy(); + SearchUtils.clearCachedSearchService(); // NOTE: We explicitly do NOT stop/destroy the kernel, as it is cached // in the Spring ApplicationContext. By default, to speed up tests, diff --git a/dspace-api/src/test/java/org/dspace/AbstractIntegrationTestWithDatabase.java b/dspace-api/src/test/java/org/dspace/AbstractIntegrationTestWithDatabase.java index 402947b966..e27fb19a68 100644 --- a/dspace-api/src/test/java/org/dspace/AbstractIntegrationTestWithDatabase.java +++ b/dspace-api/src/test/java/org/dspace/AbstractIntegrationTestWithDatabase.java @@ -15,6 +15,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.app.launcher.ScriptLauncher; import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.authority.AuthoritySearchService; import org.dspace.authority.MockAuthoritySolrServiceImpl; import org.dspace.authorize.AuthorizeException; import org.dspace.builder.AbstractBuilder; @@ -31,8 +32,9 @@ import org.dspace.kernel.ServiceManager; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.statistics.MockSolrLoggerServiceImpl; import 
org.dspace.statistics.MockSolrStatisticsCore; +import org.dspace.statistics.SolrStatisticsCore; import org.dspace.storage.rdbms.DatabaseUtils; -import org.jdom.Document; +import org.jdom2.Document; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; @@ -183,15 +185,15 @@ public class AbstractIntegrationTestWithDatabase extends AbstractDSpaceIntegrati searchService.reset(); // Clear the statistics core. serviceManager - .getServiceByName(null, MockSolrStatisticsCore.class) + .getServiceByName(SolrStatisticsCore.class.getName(), MockSolrStatisticsCore.class) .reset(); MockSolrLoggerServiceImpl statisticsService = serviceManager - .getServiceByName(null, MockSolrLoggerServiceImpl.class); + .getServiceByName("solrLoggerService", MockSolrLoggerServiceImpl.class); statisticsService.reset(); MockAuthoritySolrServiceImpl authorityService = serviceManager - .getServiceByName(null, MockAuthoritySolrServiceImpl.class); + .getServiceByName(AuthoritySearchService.class.getName(), MockAuthoritySolrServiceImpl.class); authorityService.reset(); // Reload our ConfigurationService (to reset configs to defaults again) diff --git a/dspace-api/src/test/java/org/dspace/access/status/AccessStatusServiceTest.java b/dspace-api/src/test/java/org/dspace/access/status/AccessStatusServiceTest.java new file mode 100644 index 0000000000..87127f9cf8 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/access/status/AccessStatusServiceTest.java @@ -0,0 +1,126 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status; + +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.fail; + +import java.sql.SQLException; + +import org.apache.logging.log4j.Logger; +import org.dspace.AbstractUnitTest; +import 
org.dspace.access.status.factory.AccessStatusServiceFactory; +import org.dspace.access.status.service.AccessStatusService; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.InstallItemService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.WorkspaceItemService; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +/** + * Unit Tests for access status service + */ +public class AccessStatusServiceTest extends AbstractUnitTest { + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(AccessStatusServiceTest.class); + + private Collection collection; + private Community owningCommunity; + private Item item; + + protected CommunityService communityService = + ContentServiceFactory.getInstance().getCommunityService(); + protected CollectionService collectionService = + ContentServiceFactory.getInstance().getCollectionService(); + protected ItemService itemService = + ContentServiceFactory.getInstance().getItemService(); + protected WorkspaceItemService workspaceItemService = + ContentServiceFactory.getInstance().getWorkspaceItemService(); + protected InstallItemService installItemService = + ContentServiceFactory.getInstance().getInstallItemService(); + protected AccessStatusService accessStatusService = + AccessStatusServiceFactory.getInstance().getAccessStatusService(); + + /** + * This method will be run before every test as per @Before. It will + * initialize resources required for the tests. 
+ * + * Other methods can be annotated with @Before here or in subclasses + * but no execution order is guaranteed + */ + @Before + @Override + public void init() { + super.init(); + try { + context.turnOffAuthorisationSystem(); + owningCommunity = communityService.create(null, context); + collection = collectionService.create(context, owningCommunity); + item = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + context.restoreAuthSystemState(); + } catch (AuthorizeException ex) { + log.error("Authorization Error in init", ex); + fail("Authorization Error in init: " + ex.getMessage()); + } catch (SQLException ex) { + log.error("SQL Error in init", ex); + fail("SQL Error in init: " + ex.getMessage()); + } + } + + /** + * This method will be run after every test as per @After. It will + * clean resources initialized by the @Before methods. + * + * Other methods can be annotated with @After here or in subclasses + * but no execution order is guaranteed + */ + @After + @Override + public void destroy() { + context.turnOffAuthorisationSystem(); + try { + itemService.delete(context, item); + } catch (Exception e) { + // ignore + } + try { + collectionService.delete(context, collection); + } catch (Exception e) { + // ignore + } + try { + communityService.delete(context, owningCommunity); + } catch (Exception e) { + // ignore + } + context.restoreAuthSystemState(); + item = null; + collection = null; + owningCommunity = null; + try { + super.destroy(); + } catch (Exception e) { + // ignore + } + } + + @Test + public void testGetAccessStatus() throws Exception { + String status = accessStatusService.getAccessStatus(context, item); + assertNotEquals("testGetAccessStatus 0", status, DefaultAccessStatusHelper.UNKNOWN); + } +} diff --git a/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java b/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java new file mode 
100644 index 0000000000..a41e985deb --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java @@ -0,0 +1,423 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.fail; + +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.AbstractUnitTest; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.ResourcePolicyService; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.BundleService; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.InstallItemService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.WorkspaceItemService; +import org.dspace.core.Constants; +import org.dspace.eperson.Group; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.GroupService; +import org.joda.time.LocalDate; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public 
class DefaultAccessStatusHelperTest extends AbstractUnitTest { + + private static final Logger log = LogManager.getLogger(DefaultAccessStatusHelperTest.class); + + private Collection collection; + private Community owningCommunity; + private Item itemWithoutBundle; + private Item itemWithoutBitstream; + private Item itemWithBitstream; + private Item itemWithEmbargo; + private Item itemWithDateRestriction; + private Item itemWithGroupRestriction; + private Item itemWithoutPolicy; + private Item itemWithoutPrimaryBitstream; + private Item itemWithPrimaryAndMultipleBitstreams; + private Item itemWithoutPrimaryAndMultipleBitstreams; + private DefaultAccessStatusHelper helper; + private Date threshold; + + protected CommunityService communityService = + ContentServiceFactory.getInstance().getCommunityService(); + protected CollectionService collectionService = + ContentServiceFactory.getInstance().getCollectionService(); + protected ItemService itemService = + ContentServiceFactory.getInstance().getItemService(); + protected WorkspaceItemService workspaceItemService = + ContentServiceFactory.getInstance().getWorkspaceItemService(); + protected InstallItemService installItemService = + ContentServiceFactory.getInstance().getInstallItemService(); + protected BundleService bundleService = + ContentServiceFactory.getInstance().getBundleService(); + protected BitstreamService bitstreamService = + ContentServiceFactory.getInstance().getBitstreamService(); + protected ResourcePolicyService resourcePolicyService = + AuthorizeServiceFactory.getInstance().getResourcePolicyService(); + protected GroupService groupService = + EPersonServiceFactory.getInstance().getGroupService(); + + /** + * This method will be run before every test as per @Before. It will + * initialize resources required for the tests. 
+ * + * Other methods can be annotated with @Before here or in subclasses + * but no execution order is guaranteed + */ + @Before + @Override + public void init() { + super.init(); + try { + context.turnOffAuthorisationSystem(); + owningCommunity = communityService.create(null, context); + collection = collectionService.create(context, owningCommunity); + itemWithoutBundle = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithoutBitstream = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithBitstream = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithEmbargo = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithDateRestriction = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithGroupRestriction = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithoutPolicy = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithoutPrimaryBitstream = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithPrimaryAndMultipleBitstreams = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithoutPrimaryAndMultipleBitstreams = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + context.restoreAuthSystemState(); + } catch (AuthorizeException ex) { + log.error("Authorization Error in init", ex); + fail("Authorization Error in init: " + ex.getMessage()); + } catch (SQLException ex) { + log.error("SQL Error in init", ex); + fail("SQL Error in init: " + ex.getMessage()); + } + helper = new DefaultAccessStatusHelper(); + threshold = new 
LocalDate(10000, 1, 1).toDate(); + } + + /** + * This method will be run after every test as per @After. It will + * clean resources initialized by the @Before methods. + * + * Other methods can be annotated with @After here or in subclasses + * but no execution order is guaranteed + */ + @After + @Override + public void destroy() { + context.turnOffAuthorisationSystem(); + try { + itemService.delete(context, itemWithoutBundle); + itemService.delete(context, itemWithoutBitstream); + itemService.delete(context, itemWithBitstream); + itemService.delete(context, itemWithEmbargo); + itemService.delete(context, itemWithDateRestriction); + itemService.delete(context, itemWithGroupRestriction); + itemService.delete(context, itemWithoutPolicy); + itemService.delete(context, itemWithoutPrimaryBitstream); + itemService.delete(context, itemWithPrimaryAndMultipleBitstreams); + itemService.delete(context, itemWithoutPrimaryAndMultipleBitstreams); + } catch (Exception e) { + // ignore + } + try { + collectionService.delete(context, collection); + } catch (Exception e) { + // ignore + } + try { + communityService.delete(context, owningCommunity); + } catch (Exception e) { + // ignore + } + context.restoreAuthSystemState(); + itemWithoutBundle = null; + itemWithoutBitstream = null; + itemWithBitstream = null; + itemWithEmbargo = null; + itemWithDateRestriction = null; + itemWithGroupRestriction = null; + itemWithoutPolicy = null; + itemWithoutPrimaryBitstream = null; + itemWithPrimaryAndMultipleBitstreams = null; + itemWithoutPrimaryAndMultipleBitstreams = null; + collection = null; + owningCommunity = null; + helper = null; + threshold = null; + communityService = null; + collectionService = null; + itemService = null; + workspaceItemService = null; + installItemService = null; + bundleService = null; + bitstreamService = null; + resourcePolicyService = null; + groupService = null; + try { + super.destroy(); + } catch (Exception e) { + // ignore + } + } + + /** + * Test for a 
null item + * @throws java.lang.Exception passed through. + */ + @Test + public void testWithNullItem() throws Exception { + String status = helper.getAccessStatusFromItem(context, null, threshold); + assertThat("testWithNullItem 0", status, equalTo(DefaultAccessStatusHelper.UNKNOWN)); + } + + /** + * Test for an item with no bundle + * @throws java.lang.Exception passed through. + */ + @Test + public void testWithoutBundle() throws Exception { + String status = helper.getAccessStatusFromItem(context, itemWithoutBundle, threshold); + assertThat("testWithoutBundle 0", status, equalTo(DefaultAccessStatusHelper.METADATA_ONLY)); + } + + /** + * Test for an item with no bitstream + * @throws java.lang.Exception passed through. + */ + @Test + public void testWithoutBitstream() throws Exception { + context.turnOffAuthorisationSystem(); + bundleService.create(context, itemWithoutBitstream, Constants.CONTENT_BUNDLE_NAME); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithoutBitstream, threshold); + assertThat("testWithoutBitstream 0", status, equalTo(DefaultAccessStatusHelper.METADATA_ONLY)); + } + + /** + * Test for an item with a basic bitstream (open access) + * @throws java.lang.Exception passed through. 
+ */ + @Test + public void testWithBitstream() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithBitstream, Constants.CONTENT_BUNDLE_NAME); + Bitstream bitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bitstream.setName(context, "primary"); + bundle.setPrimaryBitstreamID(bitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithBitstream, threshold); + assertThat("testWithBitstream 0", status, equalTo(DefaultAccessStatusHelper.OPEN_ACCESS)); + } + + /** + * Test for an item with an embargo + * @throws java.lang.Exception passed through. + */ + @Test + public void testWithEmbargo() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithEmbargo, Constants.CONTENT_BUNDLE_NAME); + Bitstream bitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bitstream.setName(context, "primary"); + bundle.setPrimaryBitstreamID(bitstream); + List policies = new ArrayList<>(); + ResourcePolicy policy = resourcePolicyService.create(context); + policy.setRpName("Embargo"); + Group group = groupService.findByName(context, Group.ANONYMOUS); + policy.setGroup(group); + policy.setAction(Constants.READ); + policy.setStartDate(new LocalDate(9999, 12, 31).toDate()); + policies.add(policy); + authorizeService.removeAllPolicies(context, bitstream); + authorizeService.addPolicies(context, policies, bitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithEmbargo, threshold); + assertThat("testWithEmbargo 0", status, equalTo(DefaultAccessStatusHelper.EMBARGO)); + } + + /** + * Test for an item with an anonymous date restriction + * @throws java.lang.Exception passed through. 
+ */ + @Test + public void testWithDateRestriction() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithDateRestriction, Constants.CONTENT_BUNDLE_NAME); + Bitstream bitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bitstream.setName(context, "primary"); + bundle.setPrimaryBitstreamID(bitstream); + List policies = new ArrayList<>(); + ResourcePolicy policy = resourcePolicyService.create(context); + policy.setRpName("Restriction"); + Group group = groupService.findByName(context, Group.ANONYMOUS); + policy.setGroup(group); + policy.setAction(Constants.READ); + policy.setStartDate(new LocalDate(10000, 1, 1).toDate()); + policies.add(policy); + authorizeService.removeAllPolicies(context, bitstream); + authorizeService.addPolicies(context, policies, bitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithDateRestriction, threshold); + assertThat("testWithDateRestriction 0", status, equalTo(DefaultAccessStatusHelper.RESTRICTED)); + } + + /** + * Test for an item with a group restriction + * @throws java.lang.Exception passed through. 
+ */ + @Test + public void testWithGroupRestriction() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithGroupRestriction, Constants.CONTENT_BUNDLE_NAME); + Bitstream bitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bitstream.setName(context, "primary"); + bundle.setPrimaryBitstreamID(bitstream); + List policies = new ArrayList<>(); + ResourcePolicy policy = resourcePolicyService.create(context); + policy.setRpName("Restriction"); + Group group = groupService.findByName(context, Group.ADMIN); + policy.setGroup(group); + policy.setAction(Constants.READ); + policies.add(policy); + authorizeService.removeAllPolicies(context, bitstream); + authorizeService.addPolicies(context, policies, bitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithGroupRestriction, threshold); + assertThat("testWithGroupRestriction 0", status, equalTo(DefaultAccessStatusHelper.RESTRICTED)); + } + + /** + * Test for an item with no policy + * @throws java.lang.Exception passed through. + */ + @Test + public void testWithoutPolicy() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithoutPolicy, Constants.CONTENT_BUNDLE_NAME); + Bitstream bitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bitstream.setName(context, "primary"); + bundle.setPrimaryBitstreamID(bitstream); + authorizeService.removeAllPolicies(context, bitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithoutPolicy, threshold); + assertThat("testWithoutPolicy 0", status, equalTo(DefaultAccessStatusHelper.RESTRICTED)); + } + + /** + * Test for an item with no primary bitstream + * @throws java.lang.Exception passed through. 
+ */ + @Test + public void testWithoutPrimaryBitstream() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithoutPrimaryBitstream, Constants.CONTENT_BUNDLE_NAME); + Bitstream bitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bitstream.setName(context, "first"); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithoutPrimaryBitstream, threshold); + assertThat("testWithoutPrimaryBitstream 0", status, equalTo(DefaultAccessStatusHelper.OPEN_ACCESS)); + } + + /** + * Test for an item with an open access bitstream + * and another primary bitstream on embargo + * @throws java.lang.Exception passed through. + */ + @Test + public void testWithPrimaryAndMultipleBitstreams() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithPrimaryAndMultipleBitstreams, + Constants.CONTENT_BUNDLE_NAME); + bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + Bitstream primaryBitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bundle.setPrimaryBitstreamID(primaryBitstream); + List policies = new ArrayList<>(); + ResourcePolicy policy = resourcePolicyService.create(context); + policy.setRpName("Embargo"); + Group group = groupService.findByName(context, Group.ANONYMOUS); + policy.setGroup(group); + policy.setAction(Constants.READ); + policy.setStartDate(new LocalDate(9999, 12, 31).toDate()); + policies.add(policy); + authorizeService.removeAllPolicies(context, primaryBitstream); + authorizeService.addPolicies(context, policies, primaryBitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithPrimaryAndMultipleBitstreams, threshold); + 
assertThat("testWithPrimaryAndMultipleBitstreams 0", status, equalTo(DefaultAccessStatusHelper.EMBARGO)); + } + + /** + * Test for an item with an open access bitstream + * and another bitstream on embargo + * @throws java.lang.Exception passed through. + */ + @Test + public void testWithNoPrimaryAndMultipleBitstreams() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithoutPrimaryAndMultipleBitstreams, + Constants.CONTENT_BUNDLE_NAME); + bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + Bitstream anotherBitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + List policies = new ArrayList<>(); + ResourcePolicy policy = resourcePolicyService.create(context); + policy.setRpName("Embargo"); + Group group = groupService.findByName(context, Group.ANONYMOUS); + policy.setGroup(group); + policy.setAction(Constants.READ); + policy.setStartDate(new LocalDate(9999, 12, 31).toDate()); + policies.add(policy); + authorizeService.removeAllPolicies(context, anotherBitstream); + authorizeService.addPolicies(context, policies, anotherBitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithoutPrimaryAndMultipleBitstreams, threshold); + assertThat("testWithNoPrimaryAndMultipleBitstreams 0", status, equalTo(DefaultAccessStatusHelper.OPEN_ACCESS)); + } +} diff --git a/dspace-api/src/test/java/org/dspace/administer/ProcessCleanerIT.java b/dspace-api/src/test/java/org/dspace/administer/ProcessCleanerIT.java new file mode 100644 index 0000000000..4676236cfe --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/administer/ProcessCleanerIT.java @@ -0,0 +1,380 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * 
http://www.dspace.org/license/ + */ +package org.dspace.administer; + +import static org.apache.commons.lang.time.DateUtils.addDays; +import static org.dspace.content.ProcessStatus.COMPLETED; +import static org.dspace.content.ProcessStatus.FAILED; +import static org.dspace.content.ProcessStatus.RUNNING; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +import java.sql.SQLException; +import java.util.Date; +import java.util.List; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.launcher.ScriptLauncher; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.builder.ProcessBuilder; +import org.dspace.content.ProcessStatus; +import org.dspace.scripts.Process; +import org.dspace.scripts.factory.ScriptServiceFactory; +import org.dspace.scripts.service.ProcessService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.Test; + +/** + * Integration tests for {@link ProcessCleaner}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ProcessCleanerIT extends AbstractIntegrationTestWithDatabase { + + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + + private ProcessService processService = ScriptServiceFactory.getInstance().getProcessService(); + + @Test + public void testWithoutProcessToDelete() throws Exception { + + Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2)); + Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1)); + Process process_3 = buildProcess(FAILED, addDays(new Date(), -3)); + + configurationService.setProperty("process-cleaner.days", 5); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "process-cleaner" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + List messages = testDSpaceRunnableHandler.getInfoMessages(); + assertThat(messages, hasSize(3)); + assertThat(messages, hasItem("Searching for processes with status: [COMPLETED]")); + assertThat(messages, hasItem("Found 0 processes to be deleted")); + assertThat(messages, hasItem("Process cleanup completed")); + + assertThat(processService.find(context, process_1.getID()), notNullValue()); + assertThat(processService.find(context, process_2.getID()), notNullValue()); + assertThat(processService.find(context, process_3.getID()), notNullValue()); + + } + + @Test + public void testWithoutSpecifiedStatus() throws Exception { + + Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2)); + Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1)); + Process process_3 = buildProcess(FAILED, addDays(new Date(), -3)); + Process process_4 = 
buildProcess(COMPLETED, addDays(new Date(), -6)); + Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8)); + Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7)); + Process process_7 = buildProcess(FAILED, addDays(new Date(), -8)); + + configurationService.setProperty("process-cleaner.days", 5); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "process-cleaner" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + List messages = testDSpaceRunnableHandler.getInfoMessages(); + assertThat(messages, hasSize(3)); + assertThat(messages, hasItem("Searching for processes with status: [COMPLETED]")); + assertThat(messages, hasItem("Found 2 processes to be deleted")); + assertThat(messages, hasItem("Process cleanup completed")); + + assertThat(processService.find(context, process_1.getID()), notNullValue()); + assertThat(processService.find(context, process_2.getID()), notNullValue()); + assertThat(processService.find(context, process_3.getID()), notNullValue()); + assertThat(processService.find(context, process_4.getID()), nullValue()); + assertThat(processService.find(context, process_5.getID()), nullValue()); + assertThat(processService.find(context, process_6.getID()), notNullValue()); + assertThat(processService.find(context, process_7.getID()), notNullValue()); + + } + + @Test + public void testWithCompletedStatus() throws Exception { + + Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2)); + Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1)); + Process process_3 = buildProcess(FAILED, addDays(new Date(), -3)); + Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6)); + Process process_5 = buildProcess(COMPLETED, 
addDays(new Date(), -8)); + Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7)); + Process process_7 = buildProcess(FAILED, addDays(new Date(), -8)); + + configurationService.setProperty("process-cleaner.days", 5); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "process-cleaner", "-c" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + List messages = testDSpaceRunnableHandler.getInfoMessages(); + assertThat(messages, hasSize(3)); + assertThat(messages, hasItem("Searching for processes with status: [COMPLETED]")); + assertThat(messages, hasItem("Found 2 processes to be deleted")); + assertThat(messages, hasItem("Process cleanup completed")); + + assertThat(processService.find(context, process_1.getID()), notNullValue()); + assertThat(processService.find(context, process_2.getID()), notNullValue()); + assertThat(processService.find(context, process_3.getID()), notNullValue()); + assertThat(processService.find(context, process_4.getID()), nullValue()); + assertThat(processService.find(context, process_5.getID()), nullValue()); + assertThat(processService.find(context, process_6.getID()), notNullValue()); + assertThat(processService.find(context, process_7.getID()), notNullValue()); + + } + + @Test + public void testWithRunningStatus() throws Exception { + + Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2)); + Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1)); + Process process_3 = buildProcess(FAILED, addDays(new Date(), -3)); + Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6)); + Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8)); + Process process_6 = buildProcess(RUNNING, addDays(new Date(), 
-7)); + Process process_7 = buildProcess(FAILED, addDays(new Date(), -8)); + Process process_8 = buildProcess(RUNNING, addDays(new Date(), -9)); + + configurationService.setProperty("process-cleaner.days", 5); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "process-cleaner", "-r" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + List messages = testDSpaceRunnableHandler.getInfoMessages(); + assertThat(messages, hasSize(3)); + assertThat(messages, hasItem("Searching for processes with status: [RUNNING]")); + assertThat(messages, hasItem("Found 2 processes to be deleted")); + assertThat(messages, hasItem("Process cleanup completed")); + + assertThat(processService.find(context, process_1.getID()), notNullValue()); + assertThat(processService.find(context, process_2.getID()), notNullValue()); + assertThat(processService.find(context, process_3.getID()), notNullValue()); + assertThat(processService.find(context, process_4.getID()), notNullValue()); + assertThat(processService.find(context, process_5.getID()), notNullValue()); + assertThat(processService.find(context, process_6.getID()), nullValue()); + assertThat(processService.find(context, process_7.getID()), notNullValue()); + assertThat(processService.find(context, process_8.getID()), nullValue()); + + } + + @Test + public void testWithFailedStatus() throws Exception { + + Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2)); + Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1)); + Process process_3 = buildProcess(FAILED, addDays(new Date(), -3)); + Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6)); + Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8)); + Process 
process_6 = buildProcess(RUNNING, addDays(new Date(), -7)); + Process process_7 = buildProcess(FAILED, addDays(new Date(), -8)); + Process process_8 = buildProcess(FAILED, addDays(new Date(), -9)); + + configurationService.setProperty("process-cleaner.days", 5); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "process-cleaner", "-f" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + List messages = testDSpaceRunnableHandler.getInfoMessages(); + assertThat(messages, hasSize(3)); + assertThat(messages, hasItem("Searching for processes with status: [FAILED]")); + assertThat(messages, hasItem("Found 2 processes to be deleted")); + assertThat(messages, hasItem("Process cleanup completed")); + + assertThat(processService.find(context, process_1.getID()), notNullValue()); + assertThat(processService.find(context, process_2.getID()), notNullValue()); + assertThat(processService.find(context, process_3.getID()), notNullValue()); + assertThat(processService.find(context, process_4.getID()), notNullValue()); + assertThat(processService.find(context, process_5.getID()), notNullValue()); + assertThat(processService.find(context, process_6.getID()), notNullValue()); + assertThat(processService.find(context, process_7.getID()), nullValue()); + assertThat(processService.find(context, process_8.getID()), nullValue()); + + } + + @Test + public void testWithCompletedAndFailedStatus() throws Exception { + + Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2)); + Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1)); + Process process_3 = buildProcess(FAILED, addDays(new Date(), -3)); + Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6)); + Process 
process_5 = buildProcess(COMPLETED, addDays(new Date(), -8)); + Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7)); + Process process_7 = buildProcess(FAILED, addDays(new Date(), -8)); + Process process_8 = buildProcess(FAILED, addDays(new Date(), -9)); + + configurationService.setProperty("process-cleaner.days", 5); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "process-cleaner", "-c", "-f" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + List messages = testDSpaceRunnableHandler.getInfoMessages(); + assertThat(messages, hasSize(3)); + assertThat(messages, hasItem("Searching for processes with status: [COMPLETED, FAILED]")); + assertThat(messages, hasItem("Found 4 processes to be deleted")); + assertThat(messages, hasItem("Process cleanup completed")); + + assertThat(processService.find(context, process_1.getID()), notNullValue()); + assertThat(processService.find(context, process_2.getID()), notNullValue()); + assertThat(processService.find(context, process_3.getID()), notNullValue()); + assertThat(processService.find(context, process_4.getID()), nullValue()); + assertThat(processService.find(context, process_5.getID()), nullValue()); + assertThat(processService.find(context, process_6.getID()), notNullValue()); + assertThat(processService.find(context, process_7.getID()), nullValue()); + assertThat(processService.find(context, process_8.getID()), nullValue()); + + } + + @Test + public void testWithCompletedAndRunningStatus() throws Exception { + + Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2)); + Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1)); + Process process_3 = buildProcess(FAILED, addDays(new Date(), -3)); + Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6)); + Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8)); + 
Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7)); + Process process_7 = buildProcess(FAILED, addDays(new Date(), -8)); + Process process_8 = buildProcess(RUNNING, addDays(new Date(), -9)); + + configurationService.setProperty("process-cleaner.days", 5); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "process-cleaner", "-c", "-r" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + List messages = testDSpaceRunnableHandler.getInfoMessages(); + assertThat(messages, hasSize(3)); + assertThat(messages, hasItem("Searching for processes with status: [COMPLETED, RUNNING]")); + assertThat(messages, hasItem("Found 4 processes to be deleted")); + assertThat(messages, hasItem("Process cleanup completed")); + + assertThat(processService.find(context, process_1.getID()), notNullValue()); + assertThat(processService.find(context, process_2.getID()), notNullValue()); + assertThat(processService.find(context, process_3.getID()), notNullValue()); + assertThat(processService.find(context, process_4.getID()), nullValue()); + assertThat(processService.find(context, process_5.getID()), nullValue()); + assertThat(processService.find(context, process_6.getID()), nullValue()); + assertThat(processService.find(context, process_7.getID()), notNullValue()); + assertThat(processService.find(context, process_8.getID()), nullValue()); + + } + + @Test + public void testWithFailedAndRunningStatus() throws Exception { + + Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2)); + Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1)); + Process process_3 = buildProcess(FAILED, addDays(new Date(), -3)); + Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6)); + Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8)); + Process process_6 = buildProcess(RUNNING, addDays(new Date(), 
-7)); + Process process_7 = buildProcess(FAILED, addDays(new Date(), -8)); + Process process_8 = buildProcess(RUNNING, addDays(new Date(), -9)); + + configurationService.setProperty("process-cleaner.days", 5); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "process-cleaner", "-f", "-r" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + List messages = testDSpaceRunnableHandler.getInfoMessages(); + assertThat(messages, hasSize(3)); + assertThat(messages, hasItem("Searching for processes with status: [FAILED, RUNNING]")); + assertThat(messages, hasItem("Found 3 processes to be deleted")); + assertThat(messages, hasItem("Process cleanup completed")); + + assertThat(processService.find(context, process_1.getID()), notNullValue()); + assertThat(processService.find(context, process_2.getID()), notNullValue()); + assertThat(processService.find(context, process_3.getID()), notNullValue()); + assertThat(processService.find(context, process_4.getID()), notNullValue()); + assertThat(processService.find(context, process_5.getID()), notNullValue()); + assertThat(processService.find(context, process_6.getID()), nullValue()); + assertThat(processService.find(context, process_7.getID()), nullValue()); + assertThat(processService.find(context, process_8.getID()), nullValue()); + + } + + @Test + public void testWithCompletedFailedAndRunningStatus() throws Exception { + + Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2)); + Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1)); + Process process_3 = buildProcess(FAILED, addDays(new Date(), -3)); + Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6)); + Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8)); + Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7)); + Process process_7 = buildProcess(FAILED, 
addDays(new Date(), -8)); + Process process_8 = buildProcess(RUNNING, addDays(new Date(), -9)); + + configurationService.setProperty("process-cleaner.days", 5); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "process-cleaner", "-f", "-r", "-c" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + List messages = testDSpaceRunnableHandler.getInfoMessages(); + assertThat(messages, hasSize(3)); + assertThat(messages, hasItem("Searching for processes with status: [COMPLETED, FAILED, RUNNING]")); + assertThat(messages, hasItem("Found 5 processes to be deleted")); + assertThat(messages, hasItem("Process cleanup completed")); + + assertThat(processService.find(context, process_1.getID()), notNullValue()); + assertThat(processService.find(context, process_2.getID()), notNullValue()); + assertThat(processService.find(context, process_3.getID()), notNullValue()); + assertThat(processService.find(context, process_4.getID()), nullValue()); + assertThat(processService.find(context, process_5.getID()), nullValue()); + assertThat(processService.find(context, process_6.getID()), nullValue()); + assertThat(processService.find(context, process_7.getID()), nullValue()); + assertThat(processService.find(context, process_8.getID()), nullValue()); + + } + + private Process buildProcess(ProcessStatus processStatus, Date creationTime) throws SQLException { + return ProcessBuilder.createProcess(context, admin, "test", List.of()) + .withProcessStatus(processStatus) + .withCreationTime(creationTime) + .build(); + } +} diff --git a/dspace-api/src/test/java/org/dspace/administer/StructBuilderIT.java b/dspace-api/src/test/java/org/dspace/administer/StructBuilderIT.java index 7abe3618ed..63340698ac 100644 --- a/dspace-api/src/test/java/org/dspace/administer/StructBuilderIT.java +++ b/dspace-api/src/test/java/org/dspace/administer/StructBuilderIT.java 
@@ -8,6 +8,7 @@ package org.dspace.administer; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -18,9 +19,10 @@ import java.sql.SQLException; import java.util.Iterator; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.Source; -import javax.xml.transform.TransformerException; import javax.xml.transform.stream.StreamSource; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.AbstractIntegrationTest; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Collection; @@ -29,13 +31,11 @@ import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; import org.dspace.content.service.CommunityService; -import org.junit.After; +import org.dspace.handle.Handle; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.w3c.dom.Attr; import org.w3c.dom.Node; import org.xml.sax.SAXException; @@ -53,7 +53,7 @@ import org.xmlunit.diff.Difference; */ public class StructBuilderIT extends AbstractIntegrationTest { - private static final Logger log = LoggerFactory.getLogger(StructBuilderIT.class); + private static final Logger log = LogManager.getLogger(); private static final CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); @@ -89,27 +89,28 @@ public class StructBuilderIT context.restoreAuthSystemState(); } - @After - public void tearDown() { - } + private static final String COMMUNITY_0_HANDLE = "https://hdl.handle.net/1/1"; + private static final String COMMUNITY_0_0_HANDLE = "https://hdl.handle.net/1/1.1"; + private static final String COLLECTION_0_0_0_HANDLE = "https://hdl.handle.net/1/1.1.1"; + private 
static final String COLLECTION_0_1_HANDLE = "https://hdl.handle.net/1/1.2"; /** Test structure document. */ private static final String IMPORT_DOCUMENT = "\n" + "\n" + - " \n" + + " \n" + " Top Community 0\n" + " A top level community\n" + " Testing 1 2 3\n" + " 1969\n" + " A sidebar\n" + - " \n" + + " \n" + " Sub Community 0.0\n" + " A sub community\n" + " Live from New York....\n" + " 1957\n" + " Another sidebar\n" + - " \n" + + " \n" + " Collection 0.0.0\n" + " A collection\n" + " Our next guest needs no introduction\n" + @@ -119,7 +120,14 @@ public class StructBuilderIT " Testing\n" + " \n" + " \n" + - " \n" + + " \n" + + " Sub Community 0.1\n" + + " A sub community with no handle\n" + + " Stop me if you've heard this one\n" + + " 2525\n" + + " One more sidebar\n" + + " \n" + + " \n" + " Collection 0.1\n" + " Another collection\n" + " Fourscore and seven years ago\n" + @@ -150,7 +158,7 @@ public class StructBuilderIT * @throws java.lang.Exception passed through. */ @Test - public void testImportStructure() + public void testImportStructureWithoutHandles() throws Exception { System.out.println("importStructure"); @@ -160,11 +168,7 @@ public class StructBuilderIT byte[] inputBytes = IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8); context.turnOffAuthorisationSystem(); try (InputStream input = new ByteArrayInputStream(inputBytes);) { - StructBuilder.importStructure(context, input, outputDocument); - } catch (IOException | SQLException - | ParserConfigurationException | TransformerException ex) { - System.err.println(ex.getMessage()); - System.exit(1); + StructBuilder.importStructure(context, input, outputDocument, false); } finally { context.restoreAuthSystemState(); } @@ -180,7 +184,81 @@ public class StructBuilderIT IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8))); Diff myDiff = DiffBuilder.compare(reference).withTest(output) .normalizeWhitespace() -// .withNodeFilter(new MyNodeFilter()) + .withAttributeFilter((Attr attr) -> + 
!attr.getName().equals("identifier")) + .checkForIdentical() + .build(); + + // Was there a difference? + // Always output differences -- one is expected. + ComparisonFormatter formatter = new DefaultComparisonFormatter(); + for (Difference difference : myDiff.getDifferences()) { + System.err.println(difference.toString(formatter)); + } + // Test for *significant* differences. + assertFalse("Output does not match input.", isDifferent(myDiff)); + + // TODO spot-check some objects. + } + + /** + * Test of importStructure method, with given Handles. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testImportStructureWithHandles() + throws Exception { + System.out.println("importStructure"); + + // Run the method under test and collect its output. + ByteArrayOutputStream outputDocument + = new ByteArrayOutputStream(IMPORT_DOCUMENT.length() * 2 * 2); + byte[] inputBytes = IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8); + context.turnOffAuthorisationSystem(); + try (InputStream input = new ByteArrayInputStream(inputBytes);) { + StructBuilder.importStructure(context, input, outputDocument, true); + } finally { + context.restoreAuthSystemState(); + } + + boolean found; + + // Check a chosen Community for the right Handle. + found = false; + for (Community community : communityService.findAllTop(context)) { + for (Handle handle : community.getHandles()) { + if (handle.getHandle().equals(COMMUNITY_0_HANDLE)) { + found = true; + break; + } + } + } + assertTrue("A community should have its specified handle", found); + + // Check a chosen Collection for the right Handle. + found = false; + for (Collection collection : collectionService.findAll(context)) { + for (Handle handle : collection.getHandles()) { + if (handle.getHandle().equals(COLLECTION_0_1_HANDLE)) { + found = true; + break; + } + } + } + assertTrue("A collection should have its specified handle", found); + + // Compare import's output with its input. + // N.B. 
here we rely on StructBuilder to emit communities and + // collections in the same order as the input document. If that changes, + // we will need a smarter NodeMatcher, probably based on children. + Source output = new StreamSource( + new ByteArrayInputStream(outputDocument.toByteArray())); + Source reference = new StreamSource( + new ByteArrayInputStream( + IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8))); + Diff myDiff = DiffBuilder.compare(reference).withTest(output) + .normalizeWhitespace() .withAttributeFilter((Attr attr) -> !attr.getName().equals("identifier")) .checkForIdentical() @@ -236,7 +314,6 @@ public class StructBuilderIT EXPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8))); Diff myDiff = DiffBuilder.compare(reference).withTest(output) .normalizeWhitespace() -// .withNodeFilter(new MyNodeFilter()) .withAttributeFilter((Attr attr) -> !attr.getName().equals("identifier")) .checkForIdentical() @@ -310,23 +387,4 @@ public class StructBuilderIT // There must be at most one difference. return diffIterator.hasNext(); } - - /** - * Reject uninteresting nodes. (currently commented out of tests above) - */ - /*private static class MyNodeFilter implements Predicate { - private static final List dontCare = Arrays.asList( - "description", - "intro", - "copyright", - "sidebar", - "license", - "provenance"); - - @Override - public boolean test(Node node) { - String type = node.getLocalName(); - return ! 
dontCare.contains(type); - } - }*/ } diff --git a/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportSearchIT.java b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportSearchIT.java new file mode 100644 index 0000000000..3a972692ef --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportSearchIT.java @@ -0,0 +1,253 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.app.bulkedit; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.io.IOException; +import java.io.Reader; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import com.google.common.io.Files; +import com.opencsv.CSVReader; +import com.opencsv.exceptions.CsvException; +import org.apache.log4j.Logger; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.launcher.ScriptLauncher; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchUtils; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.Before; +import org.junit.Test; + +public class MetadataExportSearchIT extends AbstractIntegrationTestWithDatabase { + + private String subject1 = "subject1"; + private String subject2 = "subject2"; + private int 
numberItemsSubject1 = 30; + private int numberItemsSubject2 = 2; + private Item[] itemsSubject1 = new Item[numberItemsSubject1]; + private Item[] itemsSubject2 = new Item[numberItemsSubject2]; + private String filename; + private Collection collection; + private Logger logger = Logger.getLogger(MetadataExportSearchIT.class); + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + private SearchService searchService; + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + + searchService = SearchUtils.getSearchService(); + + // dummy search so that the SearchService gets called in a test context first + DiscoverQuery query = new DiscoverQuery(); + query.setMaxResults(0); + searchService.search(context, query); + + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + collection = CollectionBuilder.createCollection(context, community).build(); + filename = configurationService.getProperty("dspace.dir") + + testProps.get("test.exportcsv").toString(); + + + for (int i = 0; i < numberItemsSubject1; i++) { + itemsSubject1[i] = ItemBuilder.createItem(context, collection) + .withTitle(String.format("%s item %d", subject1, i)) + .withSubject(subject1) + .withIssueDate("2020-09-" + i) + .build(); + } + + for (int i = 0; i < numberItemsSubject2; i++) { + itemsSubject2[i] = ItemBuilder.createItem(context, collection) + .withTitle(String.format("%s item %d", subject2, i)) + .withSubject(subject2) + .withIssueDate("2021-09-" + i) + .build(); + } + context.restoreAuthSystemState(); + } + + private void checkItemsPresentInFile(String filename, Item[] items) throws IOException, CsvException { + File file = new File(filename); + Reader reader = Files.newReader(file, Charset.defaultCharset()); + CSVReader csvReader = new CSVReader(reader); + + + List lines = csvReader.readAll(); + //length + 1 is because of 1 row extra for the headers 
+ assertEquals(items.length + 1, lines.size()); + + List ids = new ArrayList<>(); + //ignoring the first row as this only contains headers; + logger.debug("checking content of lines"); + for (int i = 1; i < lines.size(); i++) { + logger.debug(String.join(", ", lines.get(i))); + ids.add(lines.get(i)[0]); + } + + for (Item item : items) { + assertTrue(ids.contains(item.getID().toString())); + } + } + + @Test + public void metadateExportSearchQueryTest() throws Exception { + int result = runDSpaceScript("metadata-export-search", "-q", "subject:" + subject1, "-n", filename); + + assertEquals(0, result); + checkItemsPresentInFile(filename, itemsSubject1); + + + result = runDSpaceScript("metadata-export-search", "-q", "subject: " + subject2, "-n", filename); + + assertEquals(0, result); + checkItemsPresentInFile(filename, itemsSubject2); + } + + @Test + public void exportMetadataSearchSpecificContainerTest() throws Exception { + context.turnOffAuthorisationSystem(); + Community community2 = CommunityBuilder.createCommunity(context).build(); + Collection collection2 = CollectionBuilder.createCollection(context, community2).build(); + + int numberItemsDifferentCollection = 15; + Item[] itemsDifferentCollection = new Item[numberItemsDifferentCollection]; + for (int i = 0; i < numberItemsDifferentCollection; i++) { + itemsDifferentCollection[i] = ItemBuilder.createItem(context, collection2) + .withTitle("item different collection " + i) + .withSubject(subject1) + .build(); + } + + //creating some items with a different subject to make sure the query still works + for (int i = 0; i < 5; i++) { + ItemBuilder.createItem(context, collection2) + .withTitle("item different collection, different subject " + i) + .withSubject(subject2) + .build(); + } + context.restoreAuthSystemState(); + + int result = runDSpaceScript( + "metadata-export-search", "-q", "subject: " + subject1, "-s", collection2.getID().toString(), "-n", filename + ); + + assertEquals(0, result); + 
checkItemsPresentInFile(filename, itemsDifferentCollection); + } + + @Test + public void exportMetadataSearchFilter() throws Exception { + int result = runDSpaceScript("metadata-export-search", "-f", "subject,equals=" + subject1, "-n", filename); + + assertEquals(0, result); + checkItemsPresentInFile(filename, itemsSubject1); + } + + @Test + public void exportMetadataSearchFilterDate() throws Exception { + int result = runDSpaceScript( + "metadata-export-search", "-f", "dateIssued,equals=[2000 TO 2020]", "-n", filename + ); + + assertEquals(0, result); + checkItemsPresentInFile(filename, itemsSubject1); + } + + @Test + public void exportMetadataSearchMultipleFilters() throws Exception { + int result = runDSpaceScript( + "metadata-export-search", "-f", "subject,equals=" + subject1, "-f", + "title,equals=" + String.format("%s item %d", subject1, 0), "-n", filename + ); + + assertEquals(0, result); + Item[] expectedResult = Arrays.copyOfRange(itemsSubject1, 0, 1); + checkItemsPresentInFile(filename, expectedResult); + } + + @Test + public void exportMetadataSearchEqualsFilterTest() + throws Exception { + context.turnOffAuthorisationSystem(); + Item wellBeingItem = ItemBuilder.createItem(context, collection) + .withTitle("test item well-being") + .withSubject("well-being") + .build(); + + ItemBuilder.createItem(context, collection) + .withTitle("test item financial well-being") + .withSubject("financial well-being") + .build(); + + context.restoreAuthSystemState(); + + int result = runDSpaceScript("metadata-export-search", "-f", "subject,equals=well-being", "-n", filename); + + assertEquals(0, result); + Item[] expectedResult = new Item[] {wellBeingItem}; + checkItemsPresentInFile(filename, expectedResult); + } + + @Test + public void exportMetadataSearchInvalidDiscoveryQueryTest() throws Exception { + int result = runDSpaceScript("metadata-export-search", "-q", "blabla", "-n", filename); + + assertEquals(0, result); + Item[] items = {}; + 
checkItemsPresentInFile(filename, items); + } + + @Test + public void exportMetadataSearchNoResultsTest() throws Exception { + int result = runDSpaceScript( + "metadata-export-search", "-f", "subject,equals=notExistingSubject", "-n", filename + ); + + assertEquals(0, result); + Item[] items = {}; + checkItemsPresentInFile(filename, items); + } + + @Test + public void exportMetadataSearchNonExistinFacetsTest() throws Exception { + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] {"metadata-export-search", "-f", "nonExisting,equals=" + subject1, "-f", + "title,equals=" + String.format("%s item %d", subject1, 0), "-n", filename}; + int result = ScriptLauncher.handleScript( + args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl + ); + + assertEquals(0, result); // exception should be handled, so the script should finish with 0 + + Exception exception = testDSpaceRunnableHandler.getException(); + assertNotNull(exception); + assertEquals("nonExisting is not a valid search filter", exception.getMessage()); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataImportIT.java b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataImportIT.java index 1bd7242df0..ac5e1e6ae6 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataImportIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataImportIT.java @@ -19,6 +19,7 @@ import java.util.List; import org.apache.commons.cli.ParseException; import org.apache.commons.collections4.IteratorUtils; +import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.dspace.AbstractIntegrationTestWithDatabase; import org.dspace.app.launcher.ScriptLauncher; @@ -93,10 +94,10 @@ public class MetadataImportIT extends AbstractIntegrationTestWithDatabase { } @Test - public void metadataImportIntoCollectionWithEntityTypeTest() throws Exception { + public 
void metadataImportIntoCollectionWithEntityTypeWithTemplateEnabledTest() throws Exception { String[] csv = {"id,collection,dc.title,dc.contributor.author", "+," + publicationCollection.getHandle() + ",\"Test Import 1\"," + "\"Donald, SmithImported\""}; - performImportScript(csv); + performImportScript(csv, true); Item importedItem = findItemByName("Test Import 1"); assertTrue(StringUtils.equals(itemService.getMetadata(importedItem, "dc", "contributor", "author", Item.ANY) .get(0).getValue(), "Donald, SmithImported")); @@ -110,6 +111,24 @@ public class MetadataImportIT extends AbstractIntegrationTestWithDatabase { context.restoreAuthSystemState(); } + @Test + public void metadataImportIntoCollectionWithEntityTypeWithTemplateDisabledTest() throws Exception { + String[] csv = {"id,collection,dc.title,dc.contributor.author", + "+," + publicationCollection.getHandle() + ",\"Test Import 1\"," + "\"Donald, SmithImported\""}; + performImportScript(csv, false); + Item importedItem = findItemByName("Test Import 1"); + assertTrue(StringUtils.equals(itemService.getMetadata(importedItem, "dc", "contributor", "author", Item.ANY) + .get(0).getValue(), "Donald, SmithImported")); + assertEquals(0, itemService.getMetadata(importedItem, "dspace", "entity", "type", Item.ANY) + .size()); + eperson = ePersonService.findByEmail(context, eperson.getEmail()); + assertEquals(importedItem.getSubmitter(), eperson); + + context.turnOffAuthorisationSystem(); + itemService.delete(context, itemService.find(context, importedItem.getID())); + context.restoreAuthSystemState(); + } + @Test(expected = ParseException.class) public void metadataImportWithoutEPersonParameterTest() throws IllegalAccessException, InstantiationException, ParseException { @@ -227,12 +246,16 @@ public class MetadataImportIT extends AbstractIntegrationTestWithDatabase { return importedItem; } + public void performImportScript(String[] csv) throws Exception { + performImportScript(csv, false); + } + /** * Import mocked CSVs to 
test item creation behavior, deleting temporary file afterward. * @param csv content for test file. * @throws java.lang.Exception passed through. */ - public void performImportScript(String[] csv) throws Exception { + public void performImportScript(String[] csv, boolean useTemplate) throws Exception { File csvFile = File.createTempFile("dspace-test-import", "csv"); BufferedWriter out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(csvFile), "UTF-8")); for (String csvLine : csv) { @@ -243,6 +266,9 @@ public class MetadataImportIT extends AbstractIntegrationTestWithDatabase { String fileLocation = csvFile.getAbsolutePath(); try { String[] args = new String[] {"metadata-import", "-f", fileLocation, "-e", eperson.getEmail(), "-s"}; + if (useTemplate) { + args = ArrayUtils.add(args, "-t"); + } TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher .handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); diff --git a/dspace-api/src/test/java/org/dspace/app/itemexport/ItemExportCLIIT.java b/dspace-api/src/test/java/org/dspace/app/itemexport/ItemExportCLIIT.java new file mode 100644 index 0000000000..6db37bdbcd --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/itemexport/ItemExportCLIIT.java @@ -0,0 +1,363 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.itemexport; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.stream.Collectors; + +import org.apache.commons.codec.CharEncoding; +import org.apache.commons.io.IOUtils; +import 
org.apache.commons.io.file.PathUtils; +import org.apache.commons.lang3.StringUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.ItemService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +/** + * Basic integration testing for the SAF Export feature via CLI {@link ItemExportCLI}. + * https://wiki.lyrasis.org/display/DSDOC7x/Importing+and+Exporting+Items+via+Simple+Archive+Format + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemExportCLIIT extends AbstractIntegrationTestWithDatabase { + + private static final String zipFileName = "saf-export.zip"; + private static final String title = "A Tale of Two Cities"; + private static final String dateIssued = "1990"; + private static final String titleAlternative = "J'aime les Printemps"; + + private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + private CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + private Collection collection; + private Path tempDir; + private Path workDir; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + 
.build(); + collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .withEntityType("Publication") + .build(); + context.restoreAuthSystemState(); + + tempDir = Files.createTempDirectory("safExportTest"); + File file = new File(configurationService.getProperty("org.dspace.app.itemexport.work.dir")); + if (!file.exists()) { + Files.createDirectory(Path.of(file.getAbsolutePath())); + } + workDir = Path.of(file.getAbsolutePath()); + } + + @After + @Override + public void destroy() throws Exception { + PathUtils.deleteDirectory(tempDir); + for (Path path : Files.list(workDir).collect(Collectors.toList())) { + PathUtils.delete(path); + } + super.destroy(); + } + + @Test + public void exportCollection() throws Exception { + // create items + context.turnOffAuthorisationSystem(); + Item item1 = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + Item item2 = ItemBuilder.createItem(context, collection) + .withTitle(title + " 2") + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + context.restoreAuthSystemState(); + + String[] args = new String[] { "export", "-t", "COLLECTION", + "-i", collection.getHandle(), "-d", tempDir.toString(), "-n", "1" }; + perfomExportScript(args); + + checkDir(); + } + + @Test + public void exportZipCollection() throws Exception { + // create items + context.turnOffAuthorisationSystem(); + Item item1 = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + Item item2 = ItemBuilder.createItem(context, collection) + .withTitle(title + " 2") + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", 
titleAlternative) + .build(); + context.restoreAuthSystemState(); + + String[] args = new String[] { "export", "-t", "COLLECTION", + "-i", collection.getHandle(), "-d", tempDir.toString(), "-z", zipFileName, "-n", "1" }; + perfomExportScript(args); + + checkDir(); + checkZip(zipFileName); + } + + @Test + public void exportItemWithMetadataOnly() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + context.restoreAuthSystemState(); + + String[] args = new String[] { "export", "-t", "ITEM", + "-i", item.getHandle(), "-d", tempDir.toString(), "-n", "1" }; + perfomExportScript(args); + + checkDir(); + } + + @Test + public void exportItemWithBitstreams() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + // create bitstream + String bitstreamContent = "TEST TEST TEST"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + Bitstream bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + String[] args = new String[] { "export", "-t", "ITEM", + "-i", item.getHandle(), "-d", tempDir.toString(), "-n", "1" }; + perfomExportScript(args); + + checkDir(); + } + + @Test + public void exportItemWithAnotherMetadataSchema() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", 
titleAlternative) + .withMetadata("dcterms", "title", "", title) + .build(); + context.restoreAuthSystemState(); + + String[] args = new String[] { "export", "-t", "ITEM", + "-i", item.getHandle(), "-d", tempDir.toString(), "-n", "1" }; + perfomExportScript(args); + + checkDir(); + } + + @Test + public void exportZipItemWithBitstreams() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + // create bitstream + String bitstreamContent = "TEST TEST TEST"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + Bitstream bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + String[] args = new String[] { "export", "-t", "ITEM", + "-i", item.getHandle(), "-d", tempDir.toString(), "-z", zipFileName, "-n", "1" }; + perfomExportScript(args); + + checkDir(); + checkZip(zipFileName); + } + + @Test + public void migrateCollection() throws Exception { + // create items + context.turnOffAuthorisationSystem(); + Item item1 = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + Item item2 = ItemBuilder.createItem(context, collection) + .withTitle(title + " 2") + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + context.restoreAuthSystemState(); + + String[] args = new String[] { "export", "-t", "COLLECTION", + "-i", collection.getHandle(), "-d", tempDir.toString(), "-n", "1", "-m" }; + perfomExportScript(args); + + checkDir(); + checkCollectionMigration(); + checkItemMigration(item1); + 
checkItemMigration(item2); + } + + @Test + public void migrateItemWithMetadataOnly() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + context.restoreAuthSystemState(); + + String[] args = new String[] { "export", "-t", "ITEM", + "-i", item.getHandle(), "-d", tempDir.toString(), "-n", "1", "-m" }; + perfomExportScript(args); + + checkDir(); + checkItemMigration(item); + } + + @Test + public void migrateItemWithBitstreams() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + // create bitstream + String bitstreamContent = "TEST TEST TEST"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + Bitstream bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + String[] args = new String[] { "export", "-t", "ITEM", + "-i", item.getHandle(), "-d", tempDir.toString(), "-n", "1", "-m" }; + perfomExportScript(args); + + checkDir(); + checkItemMigration(item); + } + + @Test + public void migrateItemWithAnotherMetadataSchema() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .withMetadata("dcterms", "title", "", title) + .build(); + context.restoreAuthSystemState(); + + String[] args = new String[] { "export", "-t", "ITEM", + "-i", item.getHandle(), 
"-d", tempDir.toString(), "-n", "1", "-m" }; + perfomExportScript(args); + + checkDir(); + checkItemMigration(item); + } + + /** + * Check created export directory + * @throws Exception + */ + private void checkDir() throws Exception { + assertTrue(Files.list(tempDir).findAny().isPresent()); + } + + /** + * Check created export zip + * @param zipFileName + * @throws Exception + */ + private void checkZip(String zipFileName) throws Exception { + assertEquals(1, + Files.list(tempDir) + .filter(b -> StringUtils.equals(b.getFileName().toString(), zipFileName)) + .count()); + } + + /** + * Check migration of collection + * @throws Exception + */ + private void checkCollectionMigration() throws Exception { + assertNotNull(collectionService.find(context, collection.getID())); + } + + /** + * Check migration of item + * @param item + * @throws Exception + */ + private void checkItemMigration(Item item) throws Exception { + assertNotNull(itemService.find(context, item.getID())); + } + + private void perfomExportScript(String[] args) + throws Exception { + runDSpaceScript(args); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java b/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java new file mode 100644 index 0000000000..411e8de4df --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java @@ -0,0 +1,579 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.itemimport; + +import static org.junit.Assert.assertEquals; + +import java.io.File; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Iterator; +import java.util.List; +import java.util.stream.Collectors; + +import org.apache.commons.io.file.PathUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; 
+import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EntityTypeBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.RelationshipTypeBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.EntityType; +import org.dspace.content.Item; +import org.dspace.content.Relationship; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.RelationshipService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +/** + * Basic integration testing for the SAF Import feature via CLI {@link ItemImportCLI}. + * https://wiki.lyrasis.org/display/DSDOC7x/Importing+and+Exporting+Items+via+Simple+Archive+Format + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase { + + private static final String ZIP_NAME = "saf.zip"; + private static final String publicationTitle = "A Tale of Two Cities"; + private static final String personTitle = "Person Test"; + + private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + private RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + private Collection collection; + private Path tempDir; + private Path workDir; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + collection = 
CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .withEntityType("Publication") + .build(); + + EntityType publication = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); + EntityType person = EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + RelationshipTypeBuilder.createRelationshipTypeBuilder( + context, publication, person, "isAuthorOfPublication", + "isPublicationOfAuthor", 0, null, 0, null) + .withCopyToLeft(false).withCopyToRight(true).build(); + context.restoreAuthSystemState(); + + tempDir = Files.createTempDirectory("safImportTest"); + File file = new File(configurationService.getProperty("org.dspace.app.batchitemimport.work.dir")); + if (!file.exists()) { + Files.createDirectory(Path.of(file.getAbsolutePath())); + } + workDir = Path.of(file.getAbsolutePath()); + } + + @After + @Override + public void destroy() throws Exception { + PathUtils.deleteDirectory(tempDir); + for (Path path : Files.list(workDir).collect(Collectors.toList())) { + PathUtils.delete(path); + } + super.destroy(); + } + + @Test + public void importItemBySafWithMetadataOnly() throws Exception { + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + + String[] args = new String[] { "import", "-a", "-e", admin.getEmail(), "-c", collection.getID().toString(), + "-s", safDir.toString(), "-m", tempDir.toString() + "/mapfile.out" }; + perfomImportScript(args); + + checkMetadata(); + } + + @Test + public void importItemBySafWithBitstreams() throws Exception { + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + 
Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add bitstream + Path contentsFile = Files.createFile(Path.of(itemDir.toString() + "/contents")); + Files.writeString(contentsFile, + "file1.txt"); + Path bitstreamFile = Files.createFile(Path.of(itemDir.toString() + "/file1.txt")); + Files.writeString(bitstreamFile, + "TEST TEST TEST"); + + String[] args = new String[] { "import", "-a", "-e", admin.getEmail(), "-c", collection.getID().toString(), + "-s", safDir.toString(), "-m", tempDir.toString() + "/mapfile.out" }; + perfomImportScript(args); + + checkMetadata(); + checkBitstream(); + } + + @Test + public void importItemBySafWithAnotherMetadataSchema() throws Exception { + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add metadata with another schema + Files.copy(getClass().getResourceAsStream("metadata_dcterms.xml"), + Path.of(itemDir.toString() + "/metadata_dcterms.xml")); + + String[] args = new String[] { "import", "-a", "-e", admin.getEmail(), "-c", collection.getID().toString(), + "-s", safDir.toString(), "-m", tempDir.toString() + "/mapfile.out" }; + perfomImportScript(args); + + checkMetadata(); + checkMetadataWithAnotherSchema(); + } + + @Test + public void importItemsBySafWithRelationships() throws Exception { + context.turnOffAuthorisationSystem(); + // create collection that contains person + Collection collectionPerson = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection Person") + .withEntityType("Person") + .build(); + context.restoreAuthSystemState(); + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path publicationDir = 
Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.writeString(Path.of(publicationDir.toString() + "/collections"), + collection.getID().toString()); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(publicationDir.toString() + "/dublin_core.xml")); + Files.copy(getClass().getResourceAsStream("relationships"), + Path.of(publicationDir.toString() + "/relationships")); + Path personDir = Files.createDirectory(Path.of(safDir.toString() + "/item_001")); + Files.writeString(Path.of(personDir.toString() + "/collections"), + collectionPerson.getID().toString()); + Files.copy(getClass().getResourceAsStream("dublin_core-person.xml"), + Path.of(personDir.toString() + "/dublin_core.xml")); + + String[] args = new String[] { "import", "-a", "-p", "-e", admin.getEmail(), + "-s", safDir.toString(), "-m", tempDir.toString() + "/mapfile.out" }; + perfomImportScript(args); + + checkMetadata(); + checkRelationship(); + } + + @Test + public void importItemsBySafWithRelationshipsByRelationSchema() throws Exception { + context.turnOffAuthorisationSystem(); + // create collection that contains person + Collection collectionPerson = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection Person") + .withEntityType("Person") + .build(); + Item person = ItemBuilder.createItem(context, collectionPerson) + .withTitle(personTitle) + .build(); + context.restoreAuthSystemState(); + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + Files.writeString(Path.of(itemDir.toString() + "/metadata_relation.xml"), + "\n" + + " " + person.getID() + "\n" + + ""); + + String[] args = new String[] { "import", "-a", "-p", "-e", admin.getEmail(), "-c", + collection.getID().toString(), "-s", 
safDir.toString(), "-m", tempDir.toString() + "/mapfile.out" }; + perfomImportScript(args); + + checkRelationship(); + } + + @Test + public void importItemByZipSafWithBitstreams() throws Exception { + // use simple SAF in zip format + Files.copy(getClass().getResourceAsStream("saf-bitstreams.zip"), + Path.of(tempDir.toString() + "/" + ZIP_NAME)); + + String[] args = new String[] { "import", "-a", "-e", admin.getEmail(), "-c", collection.getID().toString(), + "-s", tempDir.toString(), "-z", ZIP_NAME, "-m", tempDir.toString() + "/mapfile.out" }; + perfomImportScript(args); + + checkMetadata(); + checkMetadataWithAnotherSchema(); + checkBitstream(); + } + + @Test + public void importItemByZipSafWithRelationships() throws Exception { + context.turnOffAuthorisationSystem(); + // create collection that contains person + Collection collectionPerson = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection Person") + .withEntityType("Person") + .build(); + // create person + Item person = ItemBuilder.createItem(context, collectionPerson) + .withTitle(personTitle) + .build(); + context.restoreAuthSystemState(); + // use simple SAF in zip format + Files.copy(getClass().getResourceAsStream("saf-relationships.zip"), + Path.of(tempDir.toString() + "/" + ZIP_NAME)); + + String[] args = new String[] { "import", "-a", "-p", "-e", admin.getEmail(), + "-c", collection.getID().toString(), "-s", tempDir.toString(), "-z", ZIP_NAME, + "-m", tempDir.toString() + "/mapfile.out" }; + perfomImportScript(args); + + checkMetadata(); + checkRelationship(); + } + + @Test + public void resumeImportItemBySafWithMetadataOnly() throws Exception { + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add mapfile + Path 
mapFile = Files.createFile(Path.of(tempDir.toString() + "/mapfile.out")); + + String[] args = new String[] { "import", "-a", "-R", "-e", admin.getEmail(), + "-c", collection.getID().toString(), "-s", safDir.toString(), + "-m", mapFile.toString() }; + perfomImportScript(args); + + checkMetadata(); + } + + @Test + public void resumeImportItemBySafWithBitstreams() throws Exception { + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add bitstream + Path contentsFile = Files.createFile(Path.of(itemDir.toString() + "/contents")); + Files.writeString(contentsFile, + "file1.txt"); + Path bitstreamFile = Files.createFile(Path.of(itemDir.toString() + "/file1.txt")); + Files.writeString(bitstreamFile, + "TEST TEST TEST"); + // add mapfile + Path mapFile = Files.createFile(Path.of(tempDir.toString() + "/mapfile.out")); + + String[] args = new String[] { "import", "-a", "-R", "-e", admin.getEmail(), + "-c", collection.getID().toString(), "-s", safDir.toString(), + "-m", mapFile.toString() }; + perfomImportScript(args); + + checkMetadata(); + checkBitstream(); + } + + @Test + public void resumeImportItemBySafWithAnotherMetadataSchema() throws Exception { + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add metadata with another schema + Files.copy(getClass().getResourceAsStream("metadata_dcterms.xml"), + Path.of(itemDir.toString() + "/metadata_dcterms.xml")); + // add mapfile + Path mapFile = Files.createFile(Path.of(tempDir.toString() + "/mapfile.out")); + + String[] args = 
new String[] { "import", "-a", "-R", "-e", admin.getEmail(), + "-c", collection.getID().toString(), "-s", safDir.toString(), + "-m", mapFile.toString() }; + perfomImportScript(args); + + checkMetadata(); + checkMetadataWithAnotherSchema(); + } + + @Test + public void resumeImportItemSkippingTheFirstOneBySafWithMetadataOnly() + throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Another Title") + .build(); + context.restoreAuthSystemState(); + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_001")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add mapfile + Path mapFile = Files.createFile(Path.of(tempDir.toString() + "/mapfile.out")); + Files.writeString(mapFile, "item_000 " + item.getHandle()); + + String[] args = new String[] { "import", "-a", "-R", "-e", admin.getEmail(), + "-c", collection.getID().toString(), "-s", safDir.toString(), + "-m", mapFile.toString() }; + perfomImportScript(args); + + checkMetadata(); + } + + @Test + public void resumeImportItemSkippingTheFirstOneBySafWithBitstreams() + throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Another Title") + .build(); + context.restoreAuthSystemState(); + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_001")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add bitstream + Path contentsFile = Files.createFile(Path.of(itemDir.toString() + "/contents")); + Files.writeString(contentsFile, + "file1.txt"); + Path bitstreamFile = 
Files.createFile(Path.of(itemDir.toString() + "/file1.txt")); + Files.writeString(bitstreamFile, + "TEST TEST TEST"); + // add mapfile + Path mapFile = Files.createFile(Path.of(tempDir.toString() + "/mapfile.out")); + Files.writeString(mapFile, "item_000 " + item.getHandle()); + + String[] args = new String[] { "import", "-a", "-R", "-e", admin.getEmail(), + "-c", collection.getID().toString(), "-s", safDir.toString(), + "-m", mapFile.toString() }; + perfomImportScript(args); + + checkMetadata(); + checkBitstream(); + } + + @Test + public void resumeImportItemSkippingTheFirstOneBySafWithAnotherMetadataSchema() + throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Another Title") + .build(); + context.restoreAuthSystemState(); + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_001")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add metadata with another schema + Files.copy(getClass().getResourceAsStream("metadata_dcterms.xml"), + Path.of(itemDir.toString() + "/metadata_dcterms.xml")); + // add mapfile + Path mapFile = Files.createFile(Path.of(tempDir.toString() + "/mapfile.out")); + Files.writeString(mapFile, "item_000 " + item.getHandle()); + + String[] args = new String[] { "import", "-a", "-R", "-e", admin.getEmail(), + "-c", collection.getID().toString(), "-s", safDir.toString(), + "-m", mapFile.toString() }; + perfomImportScript(args); + + checkMetadata(); + checkMetadataWithAnotherSchema(); + } + + @Test + public void replaceItemBySafWithMetadataOnly() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Another Title") + .build(); + context.restoreAuthSystemState(); + + // 
create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add mapfile + Path mapFile = Files.createFile(Path.of(tempDir.toString() + "/mapfile.out")); + Files.writeString(mapFile, "item_000 " + item.getHandle()); + + String[] args = new String[] { "import", "-r", "-e", admin.getEmail(), + "-c", collection.getID().toString(), "-s", safDir.toString(), + "-m", mapFile.toString() }; + perfomImportScript(args); + + checkMetadata(); + } + + @Test + public void replaceItemBySafWithBitstreams() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Another Title") + .build(); + context.restoreAuthSystemState(); + + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add bitstream + Path contentsFile = Files.createFile(Path.of(itemDir.toString() + "/contents")); + Files.writeString(contentsFile, + "file1.txt"); + Path bitstreamFile = Files.createFile(Path.of(itemDir.toString() + "/file1.txt")); + Files.writeString(bitstreamFile, + "TEST TEST TEST"); + // add mapfile + Path mapFile = Files.createFile(Path.of(tempDir.toString() + "/mapfile.out")); + Files.writeString(mapFile, "item_000 " + item.getHandle()); + + String[] args = new String[] { "import", "-r", "-e", admin.getEmail(), + "-c", collection.getID().toString(), "-s", safDir.toString(), + "-m", mapFile.toString() }; + perfomImportScript(args); + + checkMetadata(); + checkBitstream(); + } + + @Test + public void 
replaceItemBySafWithAnotherMetadataSchema() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Another Title") + .build(); + context.restoreAuthSystemState(); + + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add metadata with another schema + Files.copy(getClass().getResourceAsStream("metadata_dcterms.xml"), + Path.of(itemDir.toString() + "/metadata_dcterms.xml")); + // add mapfile + Path mapFile = Files.createFile(Path.of(tempDir.toString() + "/mapfile.out")); + Files.writeString(mapFile, "item_000 " + item.getHandle()); + + String[] args = new String[] { "import", "-r", "-e", admin.getEmail(), + "-c", collection.getID().toString(), "-s", safDir.toString(), + "-m", mapFile.toString() }; + perfomImportScript(args); + + checkMetadata(); + checkMetadataWithAnotherSchema(); + } + + @Test + public void deleteItemByMapFile() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle(publicationTitle) + .build(); + context.restoreAuthSystemState(); + // add mapfile + Path mapFile = Files.createFile(Path.of(tempDir.toString() + "/mapfile.out")); + Files.writeString(mapFile, "item_000 " + item.getHandle()); + + String[] args = new String[] { "import", "-d", "-e", admin.getEmail(), + "-m", mapFile.toString() }; + perfomImportScript(args); + + checkItemDeletion(); + } + + /** + * Check metadata on imported item + * @throws Exception + */ + private void checkMetadata() throws Exception { + Item item = itemService.findByMetadataField(context, "dc", "title", null, publicationTitle).next(); + assertEquals(item.getName(), publicationTitle); + 
assertEquals(itemService.getMetadata(item, "dc.date.issued"), "1990"); + assertEquals(itemService.getMetadata(item, "dc.title.alternative"), "J'aime les Printemps"); + } + + /** + * Check metadata on imported item + * @throws Exception + */ + private void checkMetadataWithAnotherSchema() throws Exception { + Item item = itemService.findByMetadataField(context, "dc", "title", null, publicationTitle).next(); + assertEquals(item.getName(), publicationTitle); + assertEquals(itemService.getMetadata(item, "dcterms.title"), publicationTitle); + } + + /** + * Check bitstreams on imported item + * @throws Exception + */ + private void checkBitstream() throws Exception { + Bitstream bitstream = itemService.findByMetadataField(context, "dc", "title", null, publicationTitle).next() + .getBundles("ORIGINAL").get(0).getBitstreams().get(0); + assertEquals(bitstream.getName(), "file1.txt"); + } + + /** + * Check deletion of item by mapfile + * @throws Exception + */ + private void checkItemDeletion() throws Exception { + Iterator itemIterator = itemService.findByMetadataField(context, "dc", "title", null, publicationTitle); + assertEquals(itemIterator.hasNext(), false); + } + + /** + * Check relationships between imported items + * @throws Exception + */ + private void checkRelationship() throws Exception { + Item item = itemService.findByMetadataField(context, "dc", "title", null, publicationTitle).next(); + Item author = itemService.findByMetadataField(context, "dc", "title", null, personTitle).next(); + List relationships = relationshipService.findByItem(context, item); + assertEquals(1, relationships.size()); + assertEquals(author.getID(), relationships.get(0).getRightItem().getID()); + assertEquals(item.getID(), relationships.get(0).getLeftItem().getID()); + } + + private void perfomImportScript(String[] args) + throws Exception { + runDSpaceScript(args); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/matcher/LambdaMatcher.java 
b/dspace-api/src/test/java/org/dspace/app/matcher/LambdaMatcher.java new file mode 100644 index 0000000000..f5c00c340d --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/matcher/LambdaMatcher.java @@ -0,0 +1,55 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.matcher; + +import java.util.function.Predicate; + +import org.hamcrest.BaseMatcher; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.Matchers; + +/** + * Matcher based on an {@link Predicate}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * @param the type of the instance to match + */ +public class LambdaMatcher extends BaseMatcher { + + private final Predicate matcher; + private final String description; + + public static LambdaMatcher matches(Predicate matcher) { + return new LambdaMatcher(matcher, "Matches the given predicate"); + } + + public static LambdaMatcher matches(Predicate matcher, String description) { + return new LambdaMatcher(matcher, description); + } + + public static Matcher> has(Predicate matcher) { + return Matchers.hasItem(matches(matcher)); + } + + private LambdaMatcher(Predicate matcher, String description) { + this.matcher = matcher; + this.description = description; + } + + @Override + @SuppressWarnings("unchecked") + public boolean matches(Object argument) { + return matcher.test((T) argument); + } + + @Override + public void describeTo(Description description) { + description.appendText(this.description); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/matcher/OrcidQueueMatcher.java b/dspace-api/src/test/java/org/dspace/app/matcher/OrcidQueueMatcher.java new file mode 100644 index 0000000000..9f83301515 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/matcher/OrcidQueueMatcher.java @@ -0,0 
+1,136 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.matcher; + +import static org.hamcrest.Matchers.is; + +import org.dspace.content.Item; +import org.dspace.orcid.OrcidOperation; +import org.dspace.orcid.OrcidQueue; +import org.hamcrest.BaseMatcher; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; + +/** + * Implementation of {@link org.hamcrest.Matcher} to match a OrcidQueue by all + * its attributes. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidQueueMatcher extends TypeSafeMatcher { + + private final Matcher profileItemMatcher; + + private final Matcher entityMatcher; + + private final Matcher recordTypeMatcher; + + private final Matcher putCodeMatcher; + + private final Matcher descriptionMatcher; + + private final Matcher metadataMatcher; + + private final Matcher operationMatcher; + + private final Matcher attemptsMatcher; + + private OrcidQueueMatcher(Matcher profileItemMatcher, Matcher entityMatcher, + Matcher recordTypeMatcher, Matcher putCodeMatcher, Matcher metadataMatcher, + Matcher descriptionMatcher, Matcher operationMatcher, + Matcher attemptsMatcher) { + this.profileItemMatcher = profileItemMatcher; + this.entityMatcher = entityMatcher; + this.recordTypeMatcher = recordTypeMatcher; + this.putCodeMatcher = putCodeMatcher; + this.metadataMatcher = metadataMatcher; + this.descriptionMatcher = descriptionMatcher; + this.operationMatcher = operationMatcher; + this.attemptsMatcher = attemptsMatcher; + } + + public static OrcidQueueMatcher matches(Item profileItem, Item entity, String recordType, + OrcidOperation operation) { + return new OrcidQueueMatcher(is(profileItem), is(entity), is(recordType), anything(), + anything(), anything(), is(operation), 
anything()); + } + + public static OrcidQueueMatcher matches(Item profileItem, Item entity, String recordType, + OrcidOperation operation, int attempts) { + return new OrcidQueueMatcher(is(profileItem), is(entity), is(recordType), anything(), + anything(), anything(), is(operation), is(attempts)); + } + + public static OrcidQueueMatcher matches(Item profileItem, Item entity, String recordType, + String putCode, OrcidOperation operation) { + return new OrcidQueueMatcher(is(profileItem), is(entity), is(recordType), is(putCode), + anything(), anything(), is(operation), anything()); + } + + public static OrcidQueueMatcher matches(Item profileItem, Item entity, String recordType, + String putCode, String metadata, String description, OrcidOperation operation) { + return new OrcidQueueMatcher(is(profileItem), is(entity), is(recordType), + is(putCode), is(metadata), is(description), is(operation), anything()); + } + + public static OrcidQueueMatcher matches(Item item, String recordType, + String putCode, String metadata, String description, OrcidOperation operation) { + return new OrcidQueueMatcher(is(item), is(item), is(recordType), + is(putCode), is(metadata), is(description), is(operation), anything()); + } + + public static OrcidQueueMatcher matches(Item profileItem, Item entity, String recordType, + String putCode, Matcher metadata, String description, OrcidOperation operation) { + return new OrcidQueueMatcher(is(profileItem), is(entity), is(recordType), + is(putCode), metadata, is(description), is(operation), anything()); + } + + @Override + public void describeTo(Description description) { + description.appendText("an orcid queue record that with the following attributes:") + .appendText(" item profileItem ").appendDescriptionOf(profileItemMatcher) + .appendText(", item entity ").appendDescriptionOf(entityMatcher) + .appendText(", record type ").appendDescriptionOf(recordTypeMatcher) + .appendText(", metadata ").appendDescriptionOf(metadataMatcher) + .appendText(", 
description ").appendDescriptionOf(descriptionMatcher) + .appendText(", operation ").appendDescriptionOf(operationMatcher) + .appendText(", attempts ").appendDescriptionOf(attemptsMatcher) + .appendText(" and put code ").appendDescriptionOf(putCodeMatcher); + } + + @Override + protected boolean matchesSafely(OrcidQueue item) { + return profileItemMatcher.matches(item.getProfileItem()) + && entityMatcher.matches(item.getEntity()) + && recordTypeMatcher.matches(item.getRecordType()) + && metadataMatcher.matches(item.getMetadata()) + && putCodeMatcher.matches(item.getPutCode()) + && descriptionMatcher.matches(item.getDescription()) + && operationMatcher.matches(item.getOperation()) + && attemptsMatcher.matches(item.getAttempts()); + } + + private static Matcher anything() { + return new BaseMatcher() { + + @Override + public boolean matches(Object item) { + return true; + } + + @Override + public void describeTo(Description description) { + + } + }; + + } + +} diff --git a/dspace-api/src/test/java/org/dspace/app/mediafilter/PoiWordFilterTest.java b/dspace-api/src/test/java/org/dspace/app/mediafilter/PoiWordFilterTest.java deleted file mode 100644 index 4d2353a29a..0000000000 --- a/dspace-api/src/test/java/org/dspace/app/mediafilter/PoiWordFilterTest.java +++ /dev/null @@ -1,181 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.mediafilter; - -import static org.junit.Assert.assertTrue; - -import java.io.IOException; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; - -import org.dspace.content.Item; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; - -/** - * Drive the POI-based MS Word filter. 
- * - * @author mwood - */ -public class PoiWordFilterTest { - - public PoiWordFilterTest() { - } - - @BeforeClass - public static void setUpClass() { - } - - @AfterClass - public static void tearDownClass() { - } - - @Before - public void setUp() { - } - - @After - public void tearDown() { - } - - /** - * Test of getFilteredName method, of class PoiWordFilter. - */ -/* - @Test - public void testGetFilteredName() - { - System.out.println("getFilteredName"); - String oldFilename = ""; - PoiWordFilter instance = new PoiWordFilter(); - String expResult = ""; - String result = instance.getFilteredName(oldFilename); - assertEquals(expResult, result); - // TODO review the generated test code and remove the default call to fail. - fail("The test case is a prototype."); - } -*/ - - /** - * Test of getBundleName method, of class PoiWordFilter. - */ -/* - @Test - public void testGetBundleName() - { - System.out.println("getBundleName"); - PoiWordFilter instance = new PoiWordFilter(); - String expResult = ""; - String result = instance.getBundleName(); - assertEquals(expResult, result); - // TODO review the generated test code and remove the default call to fail. - fail("The test case is a prototype."); - } -*/ - - /** - * Test of getFormatString method, of class PoiWordFilter. - */ -/* - @Test - public void testGetFormatString() - { - System.out.println("getFormatString"); - PoiWordFilter instance = new PoiWordFilter(); - String expResult = ""; - String result = instance.getFormatString(); - assertEquals(expResult, result); - // TODO review the generated test code and remove the default call to fail. - fail("The test case is a prototype."); - } -*/ - - /** - * Test of getDescription method, of class PoiWordFilter. 
- */ -/* - @Test - public void testGetDescription() - { - System.out.println("getDescription"); - PoiWordFilter instance = new PoiWordFilter(); - String expResult = ""; - String result = instance.getDescription(); - assertEquals(expResult, result); - // TODO review the generated test code and remove the default call to fail. - fail("The test case is a prototype."); - } -*/ - - /** - * Test of getDestinationStream method, of class PoiWordFilter. - * Read a constant .doc document and examine the extracted text. - * - * @throws java.lang.Exception passed through. - */ - @Test - public void testGetDestinationStreamDoc() - throws Exception { - System.out.println("getDestinationStream"); - Item currentItem = null; - InputStream source; - boolean verbose = false; - PoiWordFilter instance = new PoiWordFilter(); - InputStream result; - - source = getClass().getResourceAsStream("wordtest.doc"); - result = instance.getDestinationStream(currentItem, source, verbose); - assertTrue("Known content was not found", readAll(result).contains("quick brown fox")); - } - - /** - * Test of getDestinationStream method, of class PoiWordFilter. - * Read a constant .docx document and examine the extracted text. - * - * @throws java.lang.Exception passed through. - */ - @Test - public void testGetDestinationStreamDocx() - throws Exception { - System.out.println("getDestinationStream"); - Item currentItem = null; - InputStream source; - boolean verbose = false; - PoiWordFilter instance = new PoiWordFilter(); - InputStream result; - - source = getClass().getResourceAsStream("wordtest.docx"); - result = instance.getDestinationStream(currentItem, source, verbose); - assertTrue("Known content was not found", readAll(result).contains("quick brown fox")); - } - - /** - * Read the entire content of a stream into a String. - * - * @param stream a stream of UTF-8 characters. 
- * @return complete content of {@link stream} - * @throws IOException - */ - private static String readAll(InputStream stream) - throws IOException { - if (null == stream) { - return null; - } - - byte[] bytes = new byte[stream.available()]; - StringBuilder resultSb = new StringBuilder(bytes.length / 2); // Guess: average 2 bytes per character - int howmany; - while ((howmany = stream.read(bytes)) > 0) { - resultSb.append(new String(bytes, 0, howmany, StandardCharsets.UTF_8)); - } - return resultSb.toString(); - } -} diff --git a/dspace-api/src/test/java/org/dspace/app/mediafilter/TikaTextExtractionFilterTest.java b/dspace-api/src/test/java/org/dspace/app/mediafilter/TikaTextExtractionFilterTest.java new file mode 100644 index 0000000000..9db1ef7776 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/mediafilter/TikaTextExtractionFilterTest.java @@ -0,0 +1,323 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.mediafilter; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; + +import org.apache.commons.io.IOUtils; +import org.dspace.AbstractUnitTest; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.Test; + +/** + * Test the TikaTextExtractionFilter using test files for all major formats. 
+ * The test files used below are all located at [dspace-api]/src/test/resources/org/dspace/app/mediafilter/ + * + * @author mwood + * @author Tim Donohue + */ +public class TikaTextExtractionFilterTest extends AbstractUnitTest { + + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + + /** + * Test of getDestinationStream method using temp file for text extraction + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithUseTempFile() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + // Extract text from file with "use-temp-file=true" + configurationService.setProperty("textextractor.use-temp-file", "true"); + InputStream source = getClass().getResourceAsStream("test.pdf"); + InputStream result = instance.getDestinationStream(null, source, false); + String tempFileExtractedText = readAll(result); + + // Verify text extracted successfully + assertTrue("Known content was not found in .pdf", tempFileExtractedText.contains("quick brown fox")); + + // Now, extract text from same file using default, in-memory + configurationService.setProperty("textextractor.use-temp-file", "false"); + source = getClass().getResourceAsStream("test.pdf"); + result = instance.getDestinationStream(null, source, false); + String inMemoryExtractedText = readAll(result); + + // Verify the two results are equal + assertEquals("Extracted text via temp file is the same as in-memory.", + inMemoryExtractedText, tempFileExtractedText); + } + + /** + * Test of getDestinationStream method when max characters is less than file size + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithMaxChars() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + // Set "max-chars" to a small value of 100 chars, which is less than the text size of the file. 
+ configurationService.setProperty("textextractor.max-chars", "100"); + InputStream source = getClass().getResourceAsStream("test.pdf"); + InputStream result = instance.getDestinationStream(null, source, false); + String extractedText = readAll(result); + + // Verify we have exactly the first 100 characters + assertEquals(100, extractedText.length()); + // Verify it has some text at the beginning of the file, but NOT text near the end + assertTrue("Known beginning content was found", extractedText.contains("This is a text.")); + assertFalse("Known ending content was not found", extractedText.contains("Emergency Broadcast System")); + } + + /** + * Test of getDestinationStream method using older Microsoft Word document. + * Read a constant .doc document and examine the extracted text. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithDoc() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.doc"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .doc", readAll(result).contains("quick brown fox")); + } + + /** + * Test of getDestinationStream method using newer Microsoft Word document. + * Read a constant .docx document and examine the extracted text. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithDocx() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.docx"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .docx", readAll(result).contains("quick brown fox")); + } + + /** + * Test of getDestinationStream method using an ODT document + * Read a constant .odt document and examine the extracted text. 
+ * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithODT() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.odt"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .odt", readAll(result).contains("quick brown fox")); + } + + /** + * Test of getDestinationStream method using an RTF document + * Read a constant .rtf document and examine the extracted text. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithRTF() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.rtf"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .rtf", readAll(result).contains("quick brown fox")); + } + + /** + * Test of getDestinationStream method using a PDF document + * Read a constant .pdf document and examine the extracted text. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithPDF() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.pdf"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .pdf", readAll(result).contains("quick brown fox")); + } + + /** + * Test of getDestinationStream method using an HTML document + * Read a constant .html document and examine the extracted text. + * + * @throws java.lang.Exception passed through. 
+ */ + @Test + public void testGetDestinationStreamWithHTML() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.html"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .html", readAll(result).contains("quick brown fox")); + } + + /** + * Test of getDestinationStream method using a TXT document + * Read a constant .txt document and examine the extracted text. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithTxt() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.txt"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .txt", readAll(result).contains("quick brown fox")); + } + + /** + * Test of getDestinationStream method using a CSV document + * Read a constant .csv document and examine the extracted text. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithCsv() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.csv"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .csv", readAll(result).contains("data3,3")); + } + + /** + * Test of getDestinationStream method using an XLS document + * Read a constant .xls document and examine the extracted text. + * + * @throws java.lang.Exception passed through. 
+ */ + @Test + public void testGetDestinationStreamWithXLS() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.xls"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .xls", readAll(result).contains("data3,3")); + } + + /** + * Test of getDestinationStream method using an XLSX document + * Read a constant .xlsx document and examine the extracted text. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithXLSX() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.xlsx"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .xlsx", readAll(result).contains("data3,3")); + } + + /** + * Test of getDestinationStream method using an ODS document + * Read a constant .ods document and examine the extracted text. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithODS() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.ods"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .ods", readAll(result).contains("Data on the second sheet")); + } + + /** + * Test of getDestinationStream method using an PPT document + * Read a constant .ppt document and examine the extracted text. + * + * @throws java.lang.Exception passed through. 
+ */ + @Test + public void testGetDestinationStreamWithPPT() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.ppt"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .ppt", readAll(result).contains("quick brown fox")); + } + + /** + * Test of getDestinationStream method using an PPTX document + * Read a constant .pptx document and examine the extracted text. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithPPTX() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.pptx"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .pptx", readAll(result).contains("quick brown fox")); + } + + /** + * Test of getDestinationStream method using an ODP document + * Read a constant .odp document and examine the extracted text. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithODP() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.odp"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .odp", readAll(result).contains("quick brown fox")); + } + + /** + * Read the entire content of a stream into a String. + * + * @param stream a stream of UTF-8 characters. 
+ * @return complete content of stream as a String + * @throws IOException + */ + private static String readAll(InputStream stream) + throws IOException { + return IOUtils.toString(stream, StandardCharsets.UTF_8); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/packager/PackagerIT.java b/dspace-api/src/test/java/org/dspace/app/packager/PackagerIT.java index c814d2d9f6..7d808ab871 100644 --- a/dspace-api/src/test/java/org/dspace/app/packager/PackagerIT.java +++ b/dspace-api/src/test/java/org/dspace/app/packager/PackagerIT.java @@ -38,7 +38,7 @@ import org.dspace.content.service.ItemService; import org.dspace.content.service.WorkspaceItemService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Element; +import org.jdom2.Element; import org.junit.After; import org.junit.Before; import org.junit.Test; diff --git a/dspace-api/src/test/java/org/dspace/app/requestitem/CollectionAdministratorsRequestItemStrategyTest.java b/dspace-api/src/test/java/org/dspace/app/requestitem/CollectionAdministratorsRequestItemStrategyTest.java new file mode 100644 index 0000000000..37292e91c8 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/requestitem/CollectionAdministratorsRequestItemStrategyTest.java @@ -0,0 +1,62 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.requestitem; + +import static org.junit.Assert.assertEquals; + +import java.util.List; + +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.junit.Test; +import org.mockito.Mockito; + +/** + * + * @author Mark H. 
Wood + */ +public class CollectionAdministratorsRequestItemStrategyTest { + private static final String NAME = "John Q. Public"; + private static final String EMAIL = "jqpublic@example.com"; + + /** + * Test of getRequestItemAuthor method, of class CollectionAdministratorsRequestItemStrategy. + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetRequestItemAuthor() + throws Exception { + System.out.println("getRequestItemAuthor"); + + Context context = Mockito.mock(Context.class); + + EPerson eperson1 = Mockito.mock(EPerson.class); + Mockito.when(eperson1.getEmail()).thenReturn(EMAIL); + Mockito.when(eperson1.getFullName()).thenReturn(NAME); + + Group group1 = Mockito.mock(Group.class); + Mockito.when(group1.getMembers()).thenReturn(List.of(eperson1)); + + Collection collection1 = Mockito.mock(Collection.class); + Mockito.when(collection1.getAdministrators()).thenReturn(group1); + + Item item = Mockito.mock(Item.class); + Mockito.when(item.getOwningCollection()).thenReturn(collection1); + Mockito.when(item.getSubmitter()).thenReturn(eperson1); + + CollectionAdministratorsRequestItemStrategy instance = new CollectionAdministratorsRequestItemStrategy(); + List result = instance.getRequestItemAuthor(context, + item); + assertEquals("Should be one author", 1, result.size()); + assertEquals("Name should match " + NAME, NAME, result.get(0).getFullName()); + assertEquals("Email should match " + EMAIL, EMAIL, result.get(0).getEmail()); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/requestitem/CombiningRequestItemStrategyTest.java b/dspace-api/src/test/java/org/dspace/app/requestitem/CombiningRequestItemStrategyTest.java new file mode 100644 index 0000000000..c5475612cb --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/requestitem/CombiningRequestItemStrategyTest.java @@ -0,0 +1,53 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the 
source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.requestitem; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder; + +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.junit.Test; +import org.mockito.Mockito; + +/** + * + * @author Mark H. Wood + */ +public class CombiningRequestItemStrategyTest { + /** + * Test of getRequestItemAuthor method, of class CombiningRequestItemStrategy. + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetRequestItemAuthor() + throws Exception { + System.out.println("getRequestItemAuthor"); + Context context = null; + + Item item = Mockito.mock(Item.class); + RequestItemAuthor author1 = new RequestItemAuthor("Pat Paulsen", "ppaulsen@example.com"); + RequestItemAuthor author2 = new RequestItemAuthor("Alfred E. Neuman", "aeneuman@example.com"); + RequestItemAuthor author3 = new RequestItemAuthor("Alias Undercover", "aundercover@example.com"); + + RequestItemAuthorExtractor strategy1 = Mockito.mock(RequestItemHelpdeskStrategy.class); + Mockito.when(strategy1.getRequestItemAuthor(context, item)).thenReturn(List.of(author1)); + + RequestItemAuthorExtractor strategy2 = Mockito.mock(RequestItemMetadataStrategy.class); + Mockito.when(strategy2.getRequestItemAuthor(context, item)).thenReturn(List.of(author2, author3)); + + List strategies = List.of(strategy1, strategy2); + + CombiningRequestItemStrategy instance = new CombiningRequestItemStrategy(strategies); + List result = instance.getRequestItemAuthor(context, + item); + assertThat(result, containsInAnyOrder(author1, author2, author3)); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/sherpa/MockSHERPAService.java b/dspace-api/src/test/java/org/dspace/app/sherpa/MockSHERPAService.java index b218ba82fe..239d2864bf 100644 --- 
a/dspace-api/src/test/java/org/dspace/app/sherpa/MockSHERPAService.java +++ b/dspace-api/src/test/java/org/dspace/app/sherpa/MockSHERPAService.java @@ -11,6 +11,7 @@ import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.net.URISyntaxException; +import java.util.Objects; import org.dspace.app.sherpa.v2.SHERPAPublisherResponse; import org.dspace.app.sherpa.v2.SHERPAResponse; @@ -25,20 +26,6 @@ import org.dspace.app.sherpa.v2.SHERPAResponse; */ public class MockSHERPAService extends SHERPAService { - /** - * Simple overridden 'searchByJournalISSN' so that we do attempt to build the URI but rather than make - * an actual HTTP call, return parsed SHERPAResponse for The Lancet based on known-good JSON stored with our - * test resources. - * If URI creation, parsing, or IO fails along the way, a SHERPAResponse with an error message set will be - * returned. - * @param query ISSN string to pass in an "issn equals" API query - * @return SHERPAResponse - */ - @Override - public SHERPAResponse searchByJournalISSN(String query) { - return performRequest("publication", "issn", "equals", query, 0, 1); - } - /** * Simple overridden performRequest so that we do attempt to build the URI but rather than make * an actual HTTP call, return parsed SHERPAResponse for The Lancet based on known-good JSON stored with our @@ -67,8 +54,12 @@ public class MockSHERPAService extends SHERPAService { return new SHERPAResponse("Error building URI"); } - // Get mock JSON - in this case, a known good result for The Lancet - content = getClass().getResourceAsStream("thelancet.json"); + // Get mock JSON + // if a file with the name contained in the value does not exist, returns thelancet.json + content = getContent(value.concat(".json")); + if (Objects.isNull(content)) { + content = getContent("thelancet.json"); + } // Parse JSON input stream and return response for later evaluation return new SHERPAResponse(content, SHERPAResponse.SHERPAFormat.JSON); @@ -88,6 +79,10 
@@ public class MockSHERPAService extends SHERPAService { } } + private InputStream getContent(String fileName) { + return getClass().getResourceAsStream(fileName); + } + /** * Simple overridden performPublisherRequest so that we do attempt to build the URI but rather than make * an actual HTTP call, return parsed SHERPAPublisherResponse for PLOS based on known-good JSON stored with our @@ -133,4 +128,5 @@ public class MockSHERPAService extends SHERPAService { return new SHERPAPublisherResponse(e.getMessage()); } } + } diff --git a/dspace-api/src/test/java/org/dspace/app/sherpa/submit/SHERPASubmitServiceTest.java b/dspace-api/src/test/java/org/dspace/app/sherpa/submit/SHERPASubmitServiceTest.java index 1eaa916f56..438d754aa5 100644 --- a/dspace-api/src/test/java/org/dspace/app/sherpa/submit/SHERPASubmitServiceTest.java +++ b/dspace-api/src/test/java/org/dspace/app/sherpa/submit/SHERPASubmitServiceTest.java @@ -11,7 +11,6 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.sql.SQLException; -import java.util.List; import org.dspace.AbstractUnitTest; import org.dspace.app.sherpa.v2.SHERPAResponse; @@ -109,20 +108,18 @@ public class SHERPASubmitServiceTest extends AbstractUnitTest { // Get responses from SHERPA submit service, which should inspect item ISSNs and perform search // on the mock SHERPA service - List responses = sherpaSubmitService.searchRelatedJournals(context, testItem); + SHERPAResponse response = sherpaSubmitService.searchRelatedJournals(context, testItem); // Make sure response is not null or empty - assertTrue("Response list should not be null or empty", - responses != null && !responses.isEmpty()); + assertTrue("Response should not be null", response != null); // For each response (there should be only one based on test data) perform the standard set // of thorough parsing tests - for (SHERPAResponse response : responses) { - // Assert response is not error, or fail with message - 
assertFalse("Response was flagged as 'isError'", response.isError()); - // Skip remainder of parsing tests - these are already done in SHERPAServiceTEst - } + // Assert response is not error, or fail with message + assertFalse("Response was flagged as 'isError'", response.isError()); + + // Skip remainder of parsing tests - these are already done in SHERPAServiceTEst } } diff --git a/dspace-api/src/test/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncIT.java b/dspace-api/src/test/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncIT.java new file mode 100644 index 0000000000..4fa881257e --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncIT.java @@ -0,0 +1,154 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.solrdatabaseresync; + +import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD; +import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD_PREDB; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; + +import java.util.List; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.solr.client.solrj.SolrQuery; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.SolrDocumentList; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.launcher.ScriptLauncher; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import 
org.dspace.content.service.CollectionService; +import org.dspace.discovery.MockSolrSearchCore; +import org.dspace.kernel.ServiceManager; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.Before; +import org.junit.Test; + +public class SolrDatabaseResyncIT extends AbstractIntegrationTestWithDatabase { + + private final ConfigurationService configurationService = + DSpaceServicesFactory.getInstance().getConfigurationService(); + + private final CollectionService collectionService = + ContentServiceFactory.getInstance().getCollectionService(); + + private MockSolrSearchCore searchService; + + private Collection col; + private Item item1; + private Item item2; + + @Before + public void setUp() throws Exception { + super.setUp(); + configurationService.setProperty("solr-database-resync.time-until-reindex", 1); + + ServiceManager serviceManager = DSpaceServicesFactory.getInstance().getServiceManager(); + searchService = serviceManager.getServiceByName(null, MockSolrSearchCore.class); + + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context).withName("Parent Community").build(); + col = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection").build(); + + item1 = ItemBuilder.createItem(context, col) + .withTitle("Public item 1") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + item2 = ItemBuilder.createItem(context, col) + .withTitle("Public item 2") + .withIssueDate("2011-08-13") + .withAuthor("Smith, Maria") + .withSubject("TestingForMore") + .build(); + + context.setDispatcher("noindex"); + } + + @Test + public void solrPreDBStatusExistingItemTest() throws Exception { + // Items were created, they should contain a predb status in solr + assertHasPreDBStatus(item1); + assertHasPreDBStatus(item2); + + performSolrDatabaseResyncScript(); + + // Database status script 
was performed, their predb status should be removed + assertHasNoPreDBStatus(item1); + assertHasNoPreDBStatus(item2); + + context.restoreAuthSystemState(); + } + + @Test + public void solrPreDBStatusRemovedItemTest() throws Exception { + // Items were created, they should contain a predb status in solr + assertHasPreDBStatus(item1); + assertHasPreDBStatus(item2); + + collectionService.delete(context, col); + + // Items were deleted, they should still contain a predb status in solr for now + assertHasPreDBStatus(item1); + assertHasPreDBStatus(item2); + + performSolrDatabaseResyncScript(); + + // Database status script was performed, their solr document should have been removed + assertNoSolrDocument(item1); + assertNoSolrDocument(item2); + + context.restoreAuthSystemState(); + } + + public void assertHasNoPreDBStatus(Item item) throws Exception { + assertNotEquals(STATUS_FIELD_PREDB, getStatus(item)); + } + + public void assertHasPreDBStatus(Item item) throws Exception { + assertEquals(STATUS_FIELD_PREDB, getStatus(item)); + } + + public void assertNoSolrDocument(Item item) throws Exception { + SolrDocumentList solrDocumentList = getSolrDocumentList(item); + assertEquals(0, solrDocumentList.size()); + } + + public String getStatus(Item item) throws Exception { + SolrDocumentList solrDocumentList = getSolrDocumentList(item); + List fieldValues = ((List) solrDocumentList.get(0).getFieldValues(STATUS_FIELD)); + if (CollectionUtils.isNotEmpty(fieldValues)) { + return (String) fieldValues.get(0); + } else { + return null; + } + } + + public SolrDocumentList getSolrDocumentList(Item item) throws Exception { + SolrQuery solrQuery = new SolrQuery(); + solrQuery.setQuery("search.resourceid:" + item.getID()); + QueryResponse queryResponse = searchService.getSolr().query(solrQuery); + return queryResponse.getResults(); + } + + public void performSolrDatabaseResyncScript() throws Exception { + String[] args = new String[] {"solr-database-resync"}; + TestDSpaceRunnableHandler 
testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher + .handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/app/util/ConfigurationIT.java b/dspace-api/src/test/java/org/dspace/app/util/ConfigurationIT.java new file mode 100644 index 0000000000..388b467e97 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/util/ConfigurationIT.java @@ -0,0 +1,268 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.app.util; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.collection.IsArrayContainingInAnyOrder.arrayContainingInAnyOrder; +import static org.junit.Assert.assertEquals; + +import org.dspace.AbstractDSpaceTest; +import org.dspace.services.ConfigurationService; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.contrib.java.lang.system.Assertion; +import org.junit.contrib.java.lang.system.ExpectedSystemExit; +import org.junit.contrib.java.lang.system.SystemErrRule; +import org.junit.contrib.java.lang.system.SystemOutRule; + +/** + * Tests for configuration utilities. + * + * Because our command-line tools call System.exit(), we can't expect any code + * (such as assertions) following the call to main() to be executed. Instead we + * set up expectations in advance and attach them to an exit() trapper. 
+ * + * @author mhwood + */ +public class ConfigurationIT + extends AbstractDSpaceTest { + + private static ConfigurationService cfg; + + private static final String SINGLE_PROPERTY = "test.single"; + private static final String SINGLE_VALUE = "value"; + + private static final String ARRAY_PROPERTY = "test.array"; + private static final String[] ARRAY_VALUE = { "one", "two" }; + + private static final String PLACEHOLDER_PROPERTY = "test.substituted"; + private static final String PLACEHOLDER_VALUE = "insert ${test.single} here"; // Keep aligned with SINGLE_NAME + private static final String SUBSTITUTED_VALUE = "insert value here"; // Keep aligned with SINGLE_VALUE + + private static final String MISSING_PROPERTY = "test.missing"; + + /** Capture standard output. */ + @Rule + public final SystemOutRule systemOutRule = new SystemOutRule(); + + /** Capture standard error. */ + @Rule + public final SystemErrRule systemErrRule = new SystemErrRule(); + + /** Capture System.exit() value. */ + @Rule + public final ExpectedSystemExit expectedSystemExit = ExpectedSystemExit.none(); + + /** + * Create some expected properties before all tests. + */ + @BeforeClass + public static void setupSuite() { + cfg = kernelImpl.getConfigurationService(); + + cfg.setProperty(SINGLE_PROPERTY, SINGLE_VALUE); + cfg.setProperty(ARRAY_PROPERTY, ARRAY_VALUE); + cfg.setProperty(PLACEHOLDER_PROPERTY, PLACEHOLDER_VALUE); + cfg.setProperty(MISSING_PROPERTY, null); // Ensure that this one is undefined + } + + /** + * After all tests, remove the properties that were created at entry. + */ + @AfterClass + public static void teardownSuite() { + if (null != cfg) { + cfg.setProperty(SINGLE_PROPERTY, null); + cfg.setProperty(ARRAY_PROPERTY, null); + cfg.setProperty(PLACEHOLDER_PROPERTY, null); + } + } + + /** + * Test fetching all values of a single-valued property. 
+ */ + @Test + public void testMainAllSingle() { + String[] argv; + argv = new String[] { + "--property", SINGLE_PROPERTY + }; + expectedSystemExit.expectSystemExitWithStatus(0); + expectedSystemExit.checkAssertionAfterwards(new Assertion() { + @Override public void checkAssertion() { + String[] output = systemOutRule.getLogWithNormalizedLineSeparator() + .split("\n"); + assertThat(output, arrayWithSize(1)); + } + }); + expectedSystemExit.checkAssertionAfterwards(new Assertion() { + @Override public void checkAssertion() { + String[] output = systemOutRule.getLogWithNormalizedLineSeparator() + .split("\n"); + assertThat(output[0], equalTo(SINGLE_VALUE)); + } + }); + systemOutRule.enableLog(); + Configuration.main(argv); + } + + /** + * Test fetching all values of an array property. + */ + @Test + public void testMainAllArray() { + String[] argv; + argv = new String[] { + "--property", ARRAY_PROPERTY + }; + expectedSystemExit.expectSystemExitWithStatus(0); + expectedSystemExit.checkAssertionAfterwards(new Assertion() { + @Override public void checkAssertion() { + String[] output = systemOutRule.getLogWithNormalizedLineSeparator() + .split("\n"); + assertThat(output, arrayWithSize(ARRAY_VALUE.length)); + } + }); + expectedSystemExit.checkAssertionAfterwards(new Assertion() { + @Override public void checkAssertion() { + String[] output = systemOutRule.getLogWithNormalizedLineSeparator() + .split("\n"); + assertThat(output, arrayContainingInAnyOrder(ARRAY_VALUE)); + } + }); + systemOutRule.enableLog(); + Configuration.main(argv); + } + + /** + * Test fetching all values of a single-valued property containing property + * placeholders. 
+ */ + @Test + public void testMainAllSubstitution() { + String[] argv; + argv = new String[] { + "--property", PLACEHOLDER_PROPERTY + }; + expectedSystemExit.expectSystemExitWithStatus(0); + expectedSystemExit.checkAssertionAfterwards(new Assertion() { + @Override public void checkAssertion() { + String[] output = systemOutRule.getLogWithNormalizedLineSeparator() + .split("\n"); + assertThat(output, arrayWithSize(1)); + } + }); + expectedSystemExit.checkAssertionAfterwards(new Assertion() { + @Override public void checkAssertion() { + String[] output = systemOutRule.getLogWithNormalizedLineSeparator() + .split("\n"); + assertThat(output[0], equalTo(SUBSTITUTED_VALUE)); + } + }); + systemOutRule.enableLog(); + Configuration.main(argv); + } + + /** + * Test fetching all values of a single-valued property containing property + * placeholders, suppressing property substitution. + */ + @Test + public void testMainAllRaw() { + // Can it handle a raw property (with substitution placeholders)? + String[] argv; + argv = new String[] { + "--property", PLACEHOLDER_PROPERTY, + "--raw" + }; + expectedSystemExit.expectSystemExitWithStatus(0); + expectedSystemExit.checkAssertionAfterwards(new Assertion() { + @Override public void checkAssertion() { + String[] output = systemOutRule.getLogWithNormalizedLineSeparator() + .split("\n"); + assertThat(output, arrayWithSize(1)); + } + }); + expectedSystemExit.checkAssertionAfterwards(new Assertion() { + @Override public void checkAssertion() { + String[] output = systemOutRule.getLogWithNormalizedLineSeparator() + .split("\n"); + assertThat(output[0], equalTo(PLACEHOLDER_VALUE)); + } + }); + systemOutRule.enableLog(); + Configuration.main(argv); + } + + /** + * Test fetching all values of an undefined property. + */ + @Test + public void testMainAllUndefined() { + // Can it handle an undefined property? 
+ String[] argv; + argv = new String[] { + "--property", MISSING_PROPERTY + }; + expectedSystemExit.expectSystemExitWithStatus(0); + expectedSystemExit.checkAssertionAfterwards(new Assertion() { + @Override public void checkAssertion() { + String outputs = systemOutRule.getLogWithNormalizedLineSeparator(); + String[] output = outputs.split("\n"); + assertThat(output, arrayWithSize(0)); // Huh? Shouldn't split() return { "" } ? + } + }); + systemOutRule.enableLog(); + Configuration.main(argv); + } + + /** + * Test fetching only the first value of an array property. + */ + @Test + public void testMainFirstArray() { + String[] argv = new String[] { + "--property", ARRAY_PROPERTY, + "--first" + }; + expectedSystemExit.expectSystemExitWithStatus(0); + expectedSystemExit.checkAssertionAfterwards(() -> { + String outputs = systemOutRule.getLogWithNormalizedLineSeparator(); + String[] output = outputs.split("\n"); + assertThat(output, arrayWithSize(1)); + assertEquals("--first should return first value", output[0], ARRAY_VALUE[0]); + }); + systemOutRule.enableLog(); + Configuration.main(argv); + } + + /** + * Test fetching a single-valued property using {@code --first} + */ + @Test + public void testMainFirstSingle() { + String[] argv = new String[] { + "--property", SINGLE_PROPERTY, + "--first" + }; + expectedSystemExit.expectSystemExitWithStatus(0); + expectedSystemExit.checkAssertionAfterwards(() -> { + String outputs = systemOutRule.getLogWithNormalizedLineSeparator(); + String[] output = outputs.split("\n"); + assertThat(output, arrayWithSize(1)); + assertEquals("--first should return only value", output[0], SINGLE_VALUE); + }); + systemOutRule.enableLog(); + Configuration.main(argv); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/util/GoogleBitstreamComparatorTest.java b/dspace-api/src/test/java/org/dspace/app/util/GoogleBitstreamComparatorTest.java index 84e776b983..78142c9258 100644 --- 
a/dspace-api/src/test/java/org/dspace/app/util/GoogleBitstreamComparatorTest.java +++ b/dspace-api/src/test/java/org/dspace/app/util/GoogleBitstreamComparatorTest.java @@ -164,6 +164,12 @@ public class GoogleBitstreamComparatorTest extends AbstractUnitTest { toSort.get(1).getName()); assertEquals("Bitstreams have same size and type, so order should remain unchanged", "bitstream3", toSort.get(2).getName()); + + // Also, verify all bitstreams are considered equal (comparison returns 0) + GoogleBitstreamComparator comparator = new GoogleBitstreamComparator(context, settings); + assertEquals(0, comparator.compare(bitstream1, bitstream2)); + assertEquals(0, comparator.compare(bitstream2, bitstream3)); + assertEquals(0, comparator.compare(bitstream3, bitstream1)); } /** diff --git a/dspace-api/src/test/java/org/dspace/app/util/GoogleMetadataTest.java b/dspace-api/src/test/java/org/dspace/app/util/GoogleMetadataTest.java index e2b49ab76a..ee6723480e 100644 --- a/dspace-api/src/test/java/org/dspace/app/util/GoogleMetadataTest.java +++ b/dspace-api/src/test/java/org/dspace/app/util/GoogleMetadataTest.java @@ -8,18 +8,25 @@ package org.dspace.app.util; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.ByteArrayInputStream; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.sql.SQLException; +import java.util.Date; import java.util.List; +import java.util.Map; import com.google.common.base.Splitter; import org.apache.logging.log4j.Logger; import org.dspace.AbstractUnitTest; import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import org.dspace.content.Collection; @@ -30,6 +37,14 @@ 
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.BitstreamFormatService; import org.dspace.content.service.BitstreamService; import org.dspace.content.service.BundleService; +import org.dspace.core.Constants; +import org.dspace.eperson.Group; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.GroupService; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.joda.time.MutablePeriod; +import org.joda.time.format.PeriodFormat; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -52,6 +67,10 @@ public class GoogleMetadataTest extends AbstractUnitTest { private BitstreamService bitstreamService; + private ResourcePolicyService resourcePolicyService; + + private GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + private Community community; /** @@ -80,6 +99,8 @@ public class GoogleMetadataTest extends AbstractUnitTest { bundleService = ContentServiceFactory.getInstance().getBundleService(); bitstreamFormatService = ContentServiceFactory.getInstance().getBitstreamFormatService(); bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); + resourcePolicyService = AuthorizeServiceFactory.getInstance().getResourcePolicyService(); + groupService = EPersonServiceFactory.getInstance().getGroupService(); } catch (AuthorizeException ex) { log.error("Authorization Error in init", ex); fail("Authorization Error in init: " + ex.getMessage()); @@ -326,6 +347,45 @@ public class GoogleMetadataTest extends AbstractUnitTest { assertEquals("small", urlSplitted.get(urlSplitted.size() - 1)); } + /** + * Verify there is no mapping for {@link GoogleMetadata#PDF} if there are only embargoed (non-publically accessible + * bitstream) files + */ + @Test + public void testGetPdfUrlOfEmbargoed() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = 
ContentServiceFactory.getInstance().getBundleService().create(context, it, "ORIGINAL"); + + Bitstream b = bitstreamService.create( + context, new ByteArrayInputStream("Larger file than primary".getBytes(StandardCharsets.UTF_8))); + b.setName(context, "first"); + b.setFormat(context, bitstreamFormatService.create(context)); + b.getFormat(context).setMIMEType("unknown"); + bundleService.addBitstream(context, bundle, b); + // Set 3 month embargo on pdf + MutablePeriod period = PeriodFormat.getDefault().parseMutablePeriod("3 months"); + Date embargoDate = DateTime.now(DateTimeZone.UTC).plus(period).toDate(); + Group anonGroup = groupService.findByName(context, Group.ANONYMOUS); + authorizeService.removeAllPolicies(context, b); + resourcePolicyService.removeAllPolicies(context, b); + ResourcePolicy rp = authorizeService.createOrModifyPolicy(null, context, null, anonGroup, + null, embargoDate, Constants.READ, "GoogleMetadataTest", b); + if (rp != null) { + resourcePolicyService.update(context, rp); + } + + GoogleMetadata gm = new GoogleMetadata(this.context, it); + assertTrue(gm.getPDFURL().isEmpty()); + // No value for citation_pdf_url because only one embargoed bitstream + boolean containsPdfUrl = false; + for (Map.Entry mapping: gm.getMappings()) { + if (mapping.getKey().equalsIgnoreCase(gm.PDF)) { + containsPdfUrl = true; + } + } + assertFalse(containsPdfUrl); + } + @After @Override public void destroy() { diff --git a/dspace-api/src/test/java/org/dspace/app/util/RegexPatternUtilsTest.java b/dspace-api/src/test/java/org/dspace/app/util/RegexPatternUtilsTest.java new file mode 100644 index 0000000000..30a9100ad4 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/util/RegexPatternUtilsTest.java @@ -0,0 +1,214 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package 
org.dspace.app.util; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; + +import org.dspace.AbstractUnitTest; +import org.junit.Test; + +/** + * Tests for RegexPatternUtils + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + * + */ +public class RegexPatternUtilsTest extends AbstractUnitTest { + + @Test + public void testValidRegexWithFlag() { + final String insensitiveWord = "/[a-z]+/i"; + Pattern computePattern = Pattern.compile(insensitiveWord); + assertNotNull(computePattern); + + Matcher matcher = computePattern.matcher("Hello"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("DSpace"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("Community"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("/wrongpattern/i"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("001"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("?/'`}{][<>.,"); + assertFalse(matcher.matches()); + computePattern = RegexPatternUtils.computePattern(insensitiveWord); + assertNotNull(computePattern); + + matcher = computePattern.matcher("Hello"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("DSpace"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("Community"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("/wrong-pattern/i"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("001"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("?/'`}{][<>.,"); + assertFalse(matcher.matches()); + } + + @Test + public void 
testRegexWithoutFlag() { + final String sensitiveWord = "[a-z]+"; + Pattern computePattern = RegexPatternUtils.computePattern(sensitiveWord); + assertNotNull(computePattern); + + Matcher matcher = computePattern.matcher("hello"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("dspace"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("community"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("Hello"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("DSpace"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("Community"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("/wrongpattern/i"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("001"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("?/'`}{][<>.,"); + assertFalse(matcher.matches()); + + final String sensitiveWordWithDelimiter = "/[a-z]+/"; + computePattern = RegexPatternUtils.computePattern(sensitiveWordWithDelimiter); + assertNotNull(computePattern); + + matcher = computePattern.matcher("hello"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("dspace"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("community"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("Hello"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("DSpace"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("Community"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("/wrongpattern/i"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("001"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("?/'`}{][<>.,"); + assertFalse(matcher.matches()); + } + + @Test + public void testWithFuzzyRegex() { + String fuzzyRegex = "/[a-z]+"; + Pattern computePattern = RegexPatternUtils.computePattern(fuzzyRegex); + 
assertNotNull(computePattern); + + Matcher matcher = computePattern.matcher("/hello"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("hello"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("Hello"); + assertFalse(matcher.matches()); + + fuzzyRegex = "[a-z]+/"; + computePattern = RegexPatternUtils.computePattern(fuzzyRegex); + matcher = computePattern.matcher("hello/"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("/hello"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("hello"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("Hello"); + assertFalse(matcher.matches()); + + // equals to pattern \\[a-z]+\\ -> searching for a word delimited by '\' + fuzzyRegex = "\\\\[a-z]+\\\\"; + computePattern = RegexPatternUtils.computePattern(fuzzyRegex); + // equals to '\hello\' + matcher = computePattern.matcher("\\hello\\"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("/hello"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("hello"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("Hello"); + assertFalse(matcher.matches()); + + // equals to pattern /[a-z]+/ -> searching for a string delimited by '/' + fuzzyRegex = "\\/[a-z]+\\/"; + computePattern = RegexPatternUtils.computePattern(fuzzyRegex); + matcher = computePattern.matcher("/hello/"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("/hello"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("hello"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("Hello"); + assertFalse(matcher.matches()); + } + + @Test + public void testInvalidRegex() { + String invalidSensitive = "[a-z+"; + assertThrows(PatternSyntaxException.class, () -> RegexPatternUtils.computePattern(invalidSensitive)); + + String invalidRange = "a{1-"; + assertThrows(PatternSyntaxException.class, () -> 
RegexPatternUtils.computePattern(invalidRange)); + + String invalidGroupPattern = "(abc"; + assertThrows(PatternSyntaxException.class, () -> RegexPatternUtils.computePattern(invalidGroupPattern)); + + String emptyPattern = ""; + Pattern computePattern = RegexPatternUtils.computePattern(emptyPattern); + assertNull(computePattern); + + String blankPattern = " "; + computePattern = RegexPatternUtils.computePattern(blankPattern); + assertNull(computePattern); + + String nullPattern = null; + computePattern = RegexPatternUtils.computePattern(nullPattern); + assertNull(computePattern); + } + + @Test + public void testMultiFlagRegex() { + String multilineSensitive = "/[a-z]+/gi"; + Pattern computePattern = RegexPatternUtils.computePattern(multilineSensitive); + assertNotNull(computePattern); + Matcher matcher = computePattern.matcher("hello"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("Hello"); + assertTrue(matcher.matches()); + + multilineSensitive = "/[a-z]+/gim"; + computePattern = RegexPatternUtils.computePattern(multilineSensitive); + assertNotNull(computePattern); + matcher = computePattern.matcher("Hello" + System.lineSeparator() + "Everyone"); + assertTrue(matcher.find()); + assertEquals("Hello", matcher.group()); + assertTrue(matcher.find()); + assertEquals("Everyone", matcher.group()); + + matcher = computePattern.matcher("hello"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("HELLO"); + assertTrue(matcher.matches()); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/util/SubmissionConfigTest.java b/dspace-api/src/test/java/org/dspace/app/util/SubmissionConfigTest.java new file mode 100644 index 0000000000..be4d6a12da --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/util/SubmissionConfigTest.java @@ -0,0 +1,88 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * 
+ * http://www.dspace.org/license/ + */ +package org.dspace.app.util; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.util.ArrayList; +import java.util.List; + +import org.dspace.AbstractUnitTest; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * Tests for parsing and utilities on submission config forms / readers + * + * @author Kim Shepherd + */ +public class SubmissionConfigTest extends AbstractUnitTest { + + DCInputsReader inputReader; + + @BeforeClass + public static void setUpClass() { + } + + @AfterClass + public static void tearDownClass() { + } + + @Before + public void setUp() throws DCInputsReaderException { + inputReader = new DCInputsReader(); + } + + @After + public void tearDown() { + inputReader = null; + } + + @Test + public void testReadAndProcessTypeBindSubmissionConfig() + throws SubmissionConfigReaderException, DCInputsReaderException { + // Set up test data. This should match the typebind test submission / form config + String typeBindHandle = "123456789/typebind-test"; + String typeBindSubmissionName = "typebindtest"; + String typeBindSubmissionStepName = "typebindtest"; + + // Expected field lists from typebindtest form + List allConfiguredFields = new ArrayList<>(); + allConfiguredFields.add("dc.title"); + allConfiguredFields.add("dc.date.issued"); + allConfiguredFields.add("dc.type"); + allConfiguredFields.add("dc.identifier.isbn"); + List unboundFields = allConfiguredFields.subList(0, 3); + + // Get submission configuration + SubmissionConfig submissionConfig = + new SubmissionConfigReader().getSubmissionConfigByCollection(typeBindHandle); + // Submission name should match name defined in item-submission.xml + assertEquals(typeBindSubmissionName, submissionConfig.getSubmissionName()); + // Step 0 - our process only has one step. 
It should not be null and have the ID typebindtest + SubmissionStepConfig submissionStepConfig = submissionConfig.getStep(0); + assertNotNull(submissionStepConfig); + assertEquals(typeBindSubmissionStepName, submissionStepConfig.getId()); + // Get inputs and allowed fields + DCInputSet inputConfig = inputReader.getInputsByFormName(submissionStepConfig.getId()); + List allowedFieldsForBook = inputConfig.populateAllowedFieldNames("Book"); + List allowedFieldsForBookChapter = inputConfig.populateAllowedFieldNames("Book chapter"); + List allowedFieldsForArticle = inputConfig.populateAllowedFieldNames("Article"); + List allowedFieldsForNoType = inputConfig.populateAllowedFieldNames(null); + // Book and book chapter should be allowed all 4 fields (dc.identifier.isbn is type-bound to these types) + assertEquals(allConfiguredFields, allowedFieldsForBook); + assertEquals(allConfiguredFields, allowedFieldsForBookChapter); + // Article and missing (null) type should match the subset of fields without ISBN + assertEquals(unboundFields, allowedFieldsForArticle); + assertEquals(unboundFields, allowedFieldsForNoType); + } +} diff --git a/dspace-api/src/test/java/org/dspace/authorize/AuthorizeServiceTest.java b/dspace-api/src/test/java/org/dspace/authorize/AuthorizeServiceTest.java index 46435ec8f1..70eaa2a0b9 100644 --- a/dspace-api/src/test/java/org/dspace/authorize/AuthorizeServiceTest.java +++ b/dspace-api/src/test/java/org/dspace/authorize/AuthorizeServiceTest.java @@ -27,7 +27,7 @@ import org.junit.Assert; import org.junit.Test; /** - * Created by pbecker as he wanted to write a test against DS-3572. + * Created by pbecker to write a test against DS-3572. * This definitely needs to be extended, but it's at least a start.
*/ public class AuthorizeServiceTest extends AbstractUnitTest { @@ -80,7 +80,7 @@ public class AuthorizeServiceTest extends AbstractUnitTest { } try { - // eperson1 should be able to write as he is member of a group that has write permissions + // eperson1 should be able to write as it is a member of a group that has write permissions Assert.assertTrue(authorizeService.authorizeActionBoolean(context, eperson1, dso, Constants.WRITE, true)); // person2 shouldn't have write access Assert.assertFalse(authorizeService.authorizeActionBoolean(context, eperson2, dso, Constants.WRITE, true)); diff --git a/dspace-api/src/test/java/org/dspace/authorize/RegexPasswordValidatorTest.java b/dspace-api/src/test/java/org/dspace/authorize/RegexPasswordValidatorTest.java new file mode 100644 index 0000000000..df333fa500 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/authorize/RegexPasswordValidatorTest.java @@ -0,0 +1,84 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.authorize; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.when; + +import org.dspace.AbstractIntegrationTest; +import org.dspace.services.ConfigurationService; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +/** + * Unit tests for {@link RegexPasswordValidator}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + */ +@RunWith(MockitoJUnitRunner.class) +public class RegexPasswordValidatorTest extends AbstractIntegrationTest { + + @Mock + private ConfigurationService configurationService; + + @InjectMocks + private RegexPasswordValidator regexPasswordValidator; + + @Before + public void setup() { + when(configurationService.getProperty("authentication-password.regex-validation.pattern")) + .thenReturn("^(?=.*[a-z])(?=.*[A-Z])(?=.*\\d)(?=.*[^\\da-zA-Z]).{8,15}$"); + } + + @Test + public void testValidPassword() { + assertThat(regexPasswordValidator.isPasswordValid("TestPassword01!"), is(true)); + } + + @Test + public void testInvalidPasswordForMissingSpecialCharacter() { + assertThat(regexPasswordValidator.isPasswordValid("TestPassword01"), is(false)); + assertThat(regexPasswordValidator.isPasswordValid("TestPassword01?"), is(true)); + } + + @Test + public void testInvalidPasswordForMissingNumber() { + assertThat(regexPasswordValidator.isPasswordValid("TestPassword!"), is(false)); + assertThat(regexPasswordValidator.isPasswordValid("TestPassword1!"), is(true)); + } + + @Test + public void testInvalidPasswordForMissingUppercaseCharacter() { + assertThat(regexPasswordValidator.isPasswordValid("testpassword01!"), is(false)); + assertThat(regexPasswordValidator.isPasswordValid("testPassword01!"), is(true)); + } + + @Test + public void testInvalidPasswordForMissingLowercaseCharacter() { + assertThat(regexPasswordValidator.isPasswordValid("TESTPASSWORD01!"), is(false)); + assertThat(regexPasswordValidator.isPasswordValid("TESTPASSWORd01!"), is(true)); + } + + @Test + public void testInvalidPasswordForTooShortValue() { + assertThat(regexPasswordValidator.isPasswordValid("Test01!"), is(false)); + assertThat(regexPasswordValidator.isPasswordValid("Test012!"), is(true)); + } + + @Test + public void testInvalidPasswordForTooLongValue() { + assertThat(regexPasswordValidator.isPasswordValid("ThisIsAVeryLongPassword01!"), 
is(false)); + assertThat(regexPasswordValidator.isPasswordValid("IsAPassword012!"), is(true)); + } + +} \ No newline at end of file diff --git a/dspace-api/src/test/java/org/dspace/builder/AbstractBuilder.java b/dspace-api/src/test/java/org/dspace/builder/AbstractBuilder.java index 06deacaca4..3306ced8f4 100644 --- a/dspace-api/src/test/java/org/dspace/builder/AbstractBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/AbstractBuilder.java @@ -42,6 +42,10 @@ import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.RegistrationDataService; +import org.dspace.orcid.factory.OrcidServiceFactory; +import org.dspace.orcid.service.OrcidHistoryService; +import org.dspace.orcid.service.OrcidQueueService; +import org.dspace.orcid.service.OrcidTokenService; import org.dspace.scripts.factory.ScriptServiceFactory; import org.dspace.scripts.service.ProcessService; import org.dspace.services.factory.DSpaceServicesFactory; @@ -95,6 +99,9 @@ public abstract class AbstractBuilder { static ProcessService processService; static RequestItemService requestItemService; static VersioningService versioningService; + static OrcidHistoryService orcidHistoryService; + static OrcidQueueService orcidQueueService; + static OrcidTokenService orcidTokenService; protected Context context; @@ -151,6 +158,9 @@ public abstract class AbstractBuilder { inProgressUserService = XmlWorkflowServiceFactory.getInstance().getInProgressUserService(); poolTaskService = XmlWorkflowServiceFactory.getInstance().getPoolTaskService(); workflowItemRoleService = XmlWorkflowServiceFactory.getInstance().getWorkflowItemRoleService(); + orcidHistoryService = OrcidServiceFactory.getInstance().getOrcidHistoryService(); + orcidQueueService = OrcidServiceFactory.getInstance().getOrcidQueueService(); + orcidTokenService = OrcidServiceFactory.getInstance().getOrcidTokenService(); } @@ 
-183,6 +193,7 @@ public abstract class AbstractBuilder { processService = null; requestItemService = null; versioningService = null; + orcidTokenService = null; } diff --git a/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java b/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java index a2a8aa9d42..ff1083d318 100644 --- a/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java @@ -63,7 +63,7 @@ public abstract class AbstractDSpaceObjectBuilder final String qualifier, final String value) { try { - getService().addMetadata(context, dso, schema, element, qualifier, Item.ANY, value); + getService().addMetadata(context, dso, schema, element, qualifier, null, value); } catch (Exception e) { return handleException(e); } diff --git a/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java b/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java index f98befe57f..424833e5cc 100644 --- a/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java @@ -18,6 +18,7 @@ import org.dspace.content.BitstreamFormat; import org.dspace.content.Bundle; import org.dspace.content.Item; import org.dspace.content.service.DSpaceObjectService; +import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.eperson.Group; @@ -26,8 +27,6 @@ import org.dspace.eperson.Group; */ public class BitstreamBuilder extends AbstractDSpaceObjectBuilder { - public static final String ORIGINAL = "ORIGINAL"; - private Bitstream bitstream; private Item item; private Group readerGroup; @@ -158,12 +157,12 @@ public class BitstreamBuilder extends AbstractDSpaceObjectBuilder { } private Bundle getOriginalBundle(Item item) throws SQLException, AuthorizeException { - List bundles = itemService.getBundles(item, ORIGINAL); + List bundles = 
itemService.getBundles(item, Constants.CONTENT_BUNDLE_NAME); Bundle targetBundle = null; if (bundles.size() < 1) { // not found, create a new one - targetBundle = bundleService.create(context, item, ORIGINAL); + targetBundle = bundleService.create(context, item, Constants.CONTENT_BUNDLE_NAME); } else { // put bitstreams into first bundle targetBundle = bundles.iterator().next(); @@ -206,6 +205,7 @@ public class BitstreamBuilder extends AbstractDSpaceObjectBuilder { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup bitstream = c.reloadEntity(bitstream); diff --git a/dspace-api/src/test/java/org/dspace/builder/BitstreamFormatBuilder.java b/dspace-api/src/test/java/org/dspace/builder/BitstreamFormatBuilder.java index 1051712326..a13783ceef 100644 --- a/dspace-api/src/test/java/org/dspace/builder/BitstreamFormatBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/BitstreamFormatBuilder.java @@ -34,6 +34,7 @@ public class BitstreamFormatBuilder extends AbstractCRUDBuilder @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup bitstreamFormat = c.reloadEntity(bitstreamFormat); diff --git a/dspace-api/src/test/java/org/dspace/builder/BundleBuilder.java b/dspace-api/src/test/java/org/dspace/builder/BundleBuilder.java index 614cd54c6d..1776921ac6 100644 --- a/dspace-api/src/test/java/org/dspace/builder/BundleBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/BundleBuilder.java @@ -55,6 +55,7 @@ public class BundleBuilder extends AbstractDSpaceObjectBuilder { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); 
c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup bundle = c.reloadEntity(bundle); diff --git a/dspace-api/src/test/java/org/dspace/builder/ClaimedTaskBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ClaimedTaskBuilder.java index 63c03c4a91..aed712f2d2 100644 --- a/dspace-api/src/test/java/org/dspace/builder/ClaimedTaskBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ClaimedTaskBuilder.java @@ -124,6 +124,7 @@ public class ClaimedTaskBuilder extends AbstractBuilder { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup collection = c.reloadEntity(collection); diff --git a/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java b/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java index 5ba36af8f4..a01aef8498 100644 --- a/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java @@ -116,6 +116,7 @@ public class CommunityBuilder extends AbstractDSpaceObjectBuilder { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup community = c.reloadEntity(community); diff --git a/dspace-api/src/test/java/org/dspace/builder/EPersonBuilder.java b/dspace-api/src/test/java/org/dspace/builder/EPersonBuilder.java index 2010aef2c1..a28462eea7 100644 --- a/dspace-api/src/test/java/org/dspace/builder/EPersonBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/EPersonBuilder.java @@ -32,6 +32,7 @@ public class EPersonBuilder extends AbstractDSpaceObjectBuilder { @Override public void cleanup() throws 
Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup ePerson = c.reloadEntity(ePerson); @@ -128,6 +129,16 @@ public class EPersonBuilder extends AbstractDSpaceObjectBuilder { return this; } + public EPersonBuilder withOrcid(final String orcid) { + setMetadataSingleValue(ePerson, "eperson", "orcid", null, orcid); + return this; + } + + public EPersonBuilder withOrcidScope(final String scope) { + addMetadataValue(ePerson, "eperson", "orcid", "scope", scope); + return this; + } + public static void deleteEPerson(UUID uuid) throws SQLException, IOException { try (Context c = new Context()) { c.turnOffAuthorisationSystem(); diff --git a/dspace-api/src/test/java/org/dspace/builder/EntityTypeBuilder.java b/dspace-api/src/test/java/org/dspace/builder/EntityTypeBuilder.java index ef3c840bc2..36d9654adf 100644 --- a/dspace-api/src/test/java/org/dspace/builder/EntityTypeBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/EntityTypeBuilder.java @@ -36,6 +36,7 @@ public class EntityTypeBuilder extends AbstractBuilder { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup group = c.reloadEntity(group); diff --git a/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java index f00104014d..70dea309f2 100644 --- a/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java @@ -7,6 +7,10 @@ */ package org.dspace.builder; +import static org.dspace.content.LicenseUtils.getLicenseText; +import static org.dspace.content.MetadataSchemaEnum.DC; +import static org.dspace.content.authority.Choices.CF_ACCEPTED; + 
import java.io.IOException; import java.sql.SQLException; import java.util.UUID; @@ -15,12 +19,16 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.content.Collection; import org.dspace.content.DCDate; import org.dspace.content.Item; +import org.dspace.content.LicenseUtils; import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.WorkspaceItem; import org.dspace.content.service.DSpaceObjectService; import org.dspace.core.Context; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; +import org.dspace.profile.OrcidEntitySyncPreference; +import org.dspace.profile.OrcidProfileSyncPreference; +import org.dspace.profile.OrcidSynchronizationMode; /** * Builder to construct Item objects @@ -31,6 +39,7 @@ import org.dspace.eperson.Group; public class ItemBuilder extends AbstractDSpaceObjectBuilder { private boolean withdrawn = false; + private String handle = null; private WorkspaceItem workspaceItem; private Item item; private Group readerGroup = null; @@ -48,7 +57,7 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder { this.context = context; try { - workspaceItem = workspaceItemService.create(context, col, false); + workspaceItem = workspaceItemService.create(context, col, true); item = workspaceItem.getItem(); } catch (Exception e) { return handleException(e); @@ -73,11 +82,48 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder { public ItemBuilder withAuthor(final String authorName) { return addMetadataValue(item, MetadataSchemaEnum.DC.getName(), "contributor", "author", authorName); } + public ItemBuilder withAuthor(final String authorName, final String authority, final int confidence) { return addMetadataValue(item, MetadataSchemaEnum.DC.getName(), "contributor", "author", null, authorName, authority, confidence); } + public ItemBuilder withEditor(final String editorName) { + return addMetadataValue(item, MetadataSchemaEnum.DC.getName(), "contributor", "editor", editorName); + } + + public 
ItemBuilder withDescriptionAbstract(String description) { + return addMetadataValue(item, MetadataSchemaEnum.DC.getName(), "description", "abstract", description); + } + + public ItemBuilder withLanguage(String language) { + return addMetadataValue(item, "dc", "language", "iso", language); + } + + public ItemBuilder withIsPartOf(String isPartOf) { + return addMetadataValue(item, "dc", "relation", "ispartof", isPartOf); + } + + public ItemBuilder withDoiIdentifier(String doi) { + return addMetadataValue(item, "dc", "identifier", "doi", doi); + } + + public ItemBuilder withScopusIdentifier(String scopus) { + return addMetadataValue(item, "dc", "identifier", "scopus", scopus); + } + + public ItemBuilder withRelationFunding(String funding) { + return addMetadataValue(item, "dc", "relation", "funding", funding); + } + + public ItemBuilder withRelationFunding(String funding, String authority) { + return addMetadataValue(item, DC.getName(), "relation", "funding", null, funding, authority, 600); + } + + public ItemBuilder withRelationGrantno(String grantno) { + return addMetadataValue(item, "dc", "relation", "grantno", grantno); + } + public ItemBuilder withPersonIdentifierFirstName(final String personIdentifierFirstName) { return addMetadataValue(item, "person", "givenName", null, personIdentifierFirstName); } @@ -144,11 +190,96 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder { return addMetadataValue(item, schema, element, qualifier, value); } + public ItemBuilder withDspaceObjectOwner(String value, String authority) { + return addMetadataValue(item, "dspace", "object", "owner", null, value, authority, CF_ACCEPTED); + } + + public ItemBuilder withOrcidIdentifier(String orcid) { + return addMetadataValue(item, "person", "identifier", "orcid", orcid); + } + + public ItemBuilder withOrcidAccessToken(String accessToken, EPerson owner) { + + try { + + OrcidTokenBuilder.create(context, owner, accessToken) + .withProfileItem(item) + .build(); + + } catch 
(SQLException | AuthorizeException e) { + throw new RuntimeException(e); + } + + return this; + + } + + public ItemBuilder withOrcidAuthenticated(String authenticated) { + return addMetadataValue(item, "dspace", "orcid", "authenticated", authenticated); + } + + public ItemBuilder withOrcidSynchronizationPublicationsPreference(OrcidEntitySyncPreference value) { + return withOrcidSynchronizationPublicationsPreference(value.name()); + } + + public ItemBuilder withOrcidSynchronizationPublicationsPreference(String value) { + return setMetadataSingleValue(item, "dspace", "orcid", "sync-publications", value); + } + + public ItemBuilder withOrcidSynchronizationFundingsPreference(OrcidEntitySyncPreference value) { + return withOrcidSynchronizationFundingsPreference(value.name()); + } + + public ItemBuilder withOrcidSynchronizationFundingsPreference(String value) { + return setMetadataSingleValue(item, "dspace", "orcid", "sync-fundings", value); + } + + public ItemBuilder withOrcidSynchronizationProfilePreference(OrcidProfileSyncPreference value) { + return withOrcidSynchronizationProfilePreference(value.name()); + } + + public ItemBuilder withOrcidSynchronizationProfilePreference(String value) { + return addMetadataValue(item, "dspace", "orcid", "sync-profile", value); + } + + public ItemBuilder withOrcidSynchronizationMode(OrcidSynchronizationMode mode) { + return withOrcidSynchronizationMode(mode.name()); + } + + private ItemBuilder withOrcidSynchronizationMode(String mode) { + return setMetadataSingleValue(item, "dspace", "orcid", "sync-mode", mode); + } + + public ItemBuilder withPersonCountry(String country) { + return addMetadataValue(item, "person", "country", null, country); + } + + public ItemBuilder withScopusAuthorIdentifier(String id) { + return addMetadataValue(item, "person", "identifier", "scopus-author-id", id); + } + + public ItemBuilder withResearcherIdentifier(String rid) { + return addMetadataValue(item, "person", "identifier", "rid", rid); + } + + 
public ItemBuilder withVernacularName(String vernacularName) { + return setMetadataSingleValue(item, "person", "name", "translated", vernacularName); + } + + public ItemBuilder withVariantName(String variant) { + return addMetadataValue(item, "person", "name", "variant", variant); + } + public ItemBuilder makeUnDiscoverable() { item.setDiscoverable(false); return this; } + public ItemBuilder withHandle(String handle) { + this.handle = handle; + return this; + } + /** * Withdrawn the item under build. Please note that an user need to be loggedin the context to avoid NPE during the * creation of the provenance metadata @@ -169,10 +300,62 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder { return this; } + public ItemBuilder withOrgUnitLegalName(String name) { + return addMetadataValue(item, "organization", "legalName", null, name); + } + + public ItemBuilder withOrgUnitCountry(String addressCountry) { + return addMetadataValue(item, "organization", "address", "addressCountry", addressCountry); + } + + public ItemBuilder withOrgUnitLocality(String addressLocality) { + return addMetadataValue(item, "organization", "address", "addressLocality", addressLocality); + } + + public ItemBuilder withOrgUnitCrossrefIdentifier(String crossrefid) { + return addMetadataValue(item, "organization", "identifier", "crossrefid", crossrefid); + } + + public ItemBuilder withProjectStartDate(String startDate) { + return addMetadataValue(item, "project", "startDate", null, startDate); + } + + public ItemBuilder withProjectEndDate(String endDate) { + return addMetadataValue(item, "project", "endDate", null, endDate); + } + + public ItemBuilder withProjectInvestigator(String investigator) { + return addMetadataValue(item, "project", "investigator", null, investigator); + } + + public ItemBuilder withDescription(String description) { + return addMetadataValue(item, MetadataSchemaEnum.DC.getName(), "description", null, description); + } + + public ItemBuilder 
withProjectAmount(String amount) { + return addMetadataValue(item, "project", "amount", null, amount); + } + + public ItemBuilder withProjectAmountCurrency(String currency) { + return addMetadataValue(item, "project", "amount", "currency", currency); + } + + public ItemBuilder withUriIdentifier(String uri) { + return addMetadataValue(item, "dc", "identifier", "uri", uri); + } + + public ItemBuilder withIdentifier(String identifier) { + return addMetadataValue(item, "dc", "identifier", null, identifier); + } + + public ItemBuilder withOtherIdentifier(String identifier) { + return addMetadataValue(item, "dc", "identifier", "other", identifier); + } + /** * Create an admin group for the collection with the specified members * - * @param members epersons to add to the admin group + * @param ePerson epersons to add to the admin group * @return this builder * @throws SQLException * @throws AuthorizeException @@ -181,11 +364,14 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder { return setAdminPermission(item, ePerson, null); } + public ItemBuilder withPersonEmail(String email) { + return addMetadataValue(item, "person", "email", null, email); + } @Override public Item build() { try { - installItemService.installItem(context, workspaceItem); + installItemService.installItem(context, workspaceItem, this.handle); itemService.update(context, item); //Check if we need to make this item private. This has to be done after item install. 
@@ -209,13 +395,19 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); + // If the workspaceItem used to create this item still exists, delete it + workspaceItem = c.reloadEntity(workspaceItem); + if (workspaceItem != null) { + workspaceItemService.deleteAll(c, workspaceItem); + } // Ensure object and any related objects are reloaded before checking to see what needs cleanup item = c.reloadEntity(item); if (item != null) { delete(c, item); - c.complete(); } + c.complete(); } } @@ -245,4 +437,17 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder { } } + public ItemBuilder grantLicense() { + String license; + try { + EPerson submitter = workspaceItem.getSubmitter(); + submitter = context.reloadEntity(submitter); + license = getLicenseText(context.getCurrentLocale(), workspaceItem.getCollection(), item, submitter); + LicenseUtils.grantLicense(context, item, license, null); + } catch (Exception e) { + handleException(e); + } + return this; + } + } diff --git a/dspace-api/src/test/java/org/dspace/builder/MetadataFieldBuilder.java b/dspace-api/src/test/java/org/dspace/builder/MetadataFieldBuilder.java index dfc9112a3f..52acf9d5ed 100644 --- a/dspace-api/src/test/java/org/dspace/builder/MetadataFieldBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/MetadataFieldBuilder.java @@ -38,6 +38,7 @@ public class MetadataFieldBuilder extends AbstractBuilder { + + private static final Logger log = Logger.getLogger(OrcidHistoryBuilder.class); + + private OrcidHistory orcidHistory; + + protected OrcidHistoryBuilder(Context context) { + super(context); + } + + @Override + protected OrcidHistoryService getService() { + return orcidHistoryService; + } + + @Override + public void cleanup() throws Exception { + delete(orcidHistory); + } + + public static OrcidHistoryBuilder createOrcidHistory(Context 
context, Item profileItem, Item entity) { + OrcidHistoryBuilder builder = new OrcidHistoryBuilder(context); + return builder.create(context, profileItem, entity); + } + + private OrcidHistoryBuilder create(Context context, Item profileItem, Item entity) { + try { + this.context = context; + this.orcidHistory = getService().create(context, profileItem, entity); + } catch (Exception e) { + log.error("Error in OrcidHistoryBuilder.create(..), error: ", e); + } + return this; + } + + @Override + public OrcidHistory build() throws SQLException { + try { + getService().update(context, orcidHistory); + context.dispatchEvents(); + + indexingService.commit(); + } catch (Exception e) { + log.error("Error in OrcidHistoryBuilder.build(), error: ", e); + } + return orcidHistory; + } + + @Override + public void delete(Context c, OrcidHistory orcidHistory) throws Exception { + if (orcidHistory != null) { + getService().delete(c, orcidHistory); + } + } + + /** + * Delete the Test OrcidHistory referred to by the given ID + * + * @param id Integer of Test OrcidHistory to delete + * @throws SQLException + * @throws IOException + */ + public static void deleteOrcidHistory(Integer id) throws SQLException, IOException { + if (id == null) { + return; + } + + try (Context c = new Context()) { + OrcidHistory orcidHistory = orcidHistoryService.find(c, id); + if (orcidHistory != null) { + orcidHistoryService.delete(c, orcidHistory); + } + c.complete(); + } + } + + public void delete(OrcidHistory orcidHistory) throws Exception { + try (Context c = new Context()) { + c.turnOffAuthorisationSystem(); + OrcidHistory attachedTab = c.reloadEntity(orcidHistory); + if (attachedTab != null) { + getService().delete(c, attachedTab); + } + c.complete(); + } + indexingService.commit(); + } + + public OrcidHistoryBuilder withResponseMessage(String responseMessage) throws SQLException { + orcidHistory.setResponseMessage(responseMessage); + return this; + } + + public OrcidHistoryBuilder withPutCode(String 
putCode) throws SQLException { + orcidHistory.setPutCode(putCode); + return this; + } + + public OrcidHistoryBuilder withStatus(Integer status) throws SQLException { + orcidHistory.setStatus(status); + return this; + } + + public OrcidHistoryBuilder withMetadata(String metadata) throws SQLException { + orcidHistory.setMetadata(metadata); + return this; + } + + public OrcidHistoryBuilder withRecordType(String recordType) throws SQLException { + orcidHistory.setRecordType(recordType); + return this; + } + + public OrcidHistoryBuilder withOperation(OrcidOperation operation) throws SQLException { + orcidHistory.setOperation(operation); + return this; + } + + public OrcidHistoryBuilder withDescription(String description) throws SQLException { + orcidHistory.setDescription(description); + return this; + } + + public OrcidHistoryBuilder withTimestamp(Date timestamp) { + orcidHistory.setTimestamp(timestamp); + return this; + } +} diff --git a/dspace-api/src/test/java/org/dspace/builder/OrcidQueueBuilder.java b/dspace-api/src/test/java/org/dspace/builder/OrcidQueueBuilder.java new file mode 100644 index 0000000000..bbc0e0e532 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/builder/OrcidQueueBuilder.java @@ -0,0 +1,146 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.builder; + +import java.sql.SQLException; + +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.orcid.OrcidOperation; +import org.dspace.orcid.OrcidQueue; +import org.dspace.orcid.service.OrcidQueueService; + +/** + * Builder to construct OrcidQueue objects + * + * @author Mykhaylo Boychuk (4science) + */ +public class OrcidQueueBuilder extends AbstractBuilder { + + private OrcidQueue orcidQueue; + + protected 
OrcidQueueBuilder(Context context) { + super(context); + } + + @Override + protected OrcidQueueService getService() { + return orcidQueueService; + } + + @Override + public void cleanup() throws Exception { + delete(orcidQueue); + } + + public static OrcidQueueBuilder createOrcidQueue(Context context, Item profileItem, Item entity) { + OrcidQueueBuilder builder = new OrcidQueueBuilder(context); + return builder.createEntityInsertionRecord(context, profileItem, entity); + } + + public static OrcidQueueBuilder createOrcidQueue(Context context, Item profileItem, Item entity, String putCode) { + OrcidQueueBuilder builder = new OrcidQueueBuilder(context); + return builder.createEntityUpdateRecord(context, profileItem, entity, putCode); + } + + public static OrcidQueueBuilder createOrcidQueue(Context context, Item profileItem, String description, + String type, String putCode) { + OrcidQueueBuilder builder = new OrcidQueueBuilder(context); + return builder.createEntityDeletionRecord(context, profileItem, description, type, putCode); + } + + private OrcidQueueBuilder createEntityDeletionRecord(Context context, Item profileItem, + String description, String type, String putCode) { + try { + this.context = context; + this.orcidQueue = getService().createEntityDeletionRecord(context, profileItem, description, type, putCode); + } catch (Exception e) { + throw new RuntimeException(e); + } + return this; + } + + private OrcidQueueBuilder createEntityUpdateRecord(Context context, Item profileItem, Item entity, String putCode) { + try { + this.context = context; + this.orcidQueue = getService().createEntityUpdateRecord(context, profileItem, entity, putCode); + } catch (Exception e) { + throw new RuntimeException(e); + } + return this; + } + + private OrcidQueueBuilder createEntityInsertionRecord(Context context, Item profileItem, Item entity) { + try { + this.context = context; + this.orcidQueue = getService().createEntityInsertionRecord(context, profileItem, entity); + } catch 
(Exception e) { + throw new RuntimeException(e); + } + return this; + } + + @Override + public OrcidQueue build() throws SQLException, AuthorizeException { + try { + getService().update(context, orcidQueue); + context.dispatchEvents(); + + indexingService.commit(); + } catch (Exception e) { + throw new RuntimeException(e); + } + return orcidQueue; + } + + public OrcidQueueBuilder withPutCode(String putCode) { + orcidQueue.setPutCode(putCode); + return this; + } + + public OrcidQueueBuilder withMetadata(String metadata) throws SQLException { + orcidQueue.setMetadata(metadata); + return this; + } + + public OrcidQueueBuilder withRecordType(String recordType) throws SQLException { + orcidQueue.setRecordType(recordType); + return this; + } + + public OrcidQueueBuilder withOperation(OrcidOperation operation) throws SQLException { + orcidQueue.setOperation(operation); + return this; + } + + public OrcidQueueBuilder withDescription(String description) throws SQLException { + orcidQueue.setDescription(description); + return this; + } + + @Override + public void delete(Context c, OrcidQueue orcidQueue) throws Exception { + if (orcidQueue != null) { + getService().delete(c, orcidQueue); + } + } + + public void delete(OrcidQueue orcidQueue) throws Exception { + try (Context c = new Context()) { + c.turnOffAuthorisationSystem(); + OrcidQueue attachedTab = c.reloadEntity(orcidQueue); + if (attachedTab != null) { + getService().delete(c, attachedTab); + } + c.complete(); + } + indexingService.commit(); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/builder/OrcidTokenBuilder.java b/dspace-api/src/test/java/org/dspace/builder/OrcidTokenBuilder.java new file mode 100644 index 0000000000..e3e149a9ec --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/builder/OrcidTokenBuilder.java @@ -0,0 +1,76 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available 
online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.builder; + +import java.sql.SQLException; + +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.orcid.OrcidToken; +import org.dspace.orcid.service.OrcidTokenService; + +/** + * Builder for {@link OrcidToken} entities. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidTokenBuilder extends AbstractBuilder { + + private OrcidToken orcidToken; + + protected OrcidTokenBuilder(Context context) { + super(context); + } + + public static OrcidTokenBuilder create(Context context, EPerson ePerson, String accessToken) { + OrcidTokenBuilder builder = new OrcidTokenBuilder(context); + builder.create(ePerson, accessToken); + return builder; + } + + private void create(EPerson ePerson, String accessToken) { + orcidToken = orcidTokenService.create(context, ePerson, accessToken); + } + + public OrcidTokenBuilder withProfileItem(Item profileItem) { + orcidToken.setProfileItem(profileItem); + return this; + } + + @Override + public OrcidToken build() throws SQLException, AuthorizeException { + return orcidToken; + } + + @Override + public void delete(Context c, OrcidToken orcidToken) throws Exception { + orcidTokenService.delete(c, orcidToken); + } + + @Override + public void cleanup() throws Exception { + try (Context context = new Context()) { + context.setDispatcher("noindex"); + context.turnOffAuthorisationSystem(); + orcidToken = context.reloadEntity(orcidToken); + if (orcidToken != null) { + delete(context, orcidToken); + context.complete(); + } + } + } + + @Override + protected OrcidTokenService getService() { + return orcidTokenService; + } + +} diff --git a/dspace-api/src/test/java/org/dspace/builder/PoolTaskBuilder.java b/dspace-api/src/test/java/org/dspace/builder/PoolTaskBuilder.java index c0de46e1e5..633d025f2e 100644 --- 
a/dspace-api/src/test/java/org/dspace/builder/PoolTaskBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/PoolTaskBuilder.java @@ -104,6 +104,7 @@ public class PoolTaskBuilder extends AbstractBuilder @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup workspaceItem = c.reloadEntity(workspaceItem); diff --git a/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java index 6970cd57c3..86573940e4 100644 --- a/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java @@ -11,12 +11,15 @@ import java.io.IOException; import java.sql.SQLException; import java.text.ParseException; import java.text.SimpleDateFormat; +import java.util.Date; import java.util.List; +import java.util.Set; import org.dspace.authorize.AuthorizeException; import org.dspace.content.ProcessStatus; import org.dspace.core.Context; import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.Process; import org.dspace.scripts.service.ProcessService; @@ -33,14 +36,22 @@ public class ProcessBuilder extends AbstractBuilder { List parameters) throws SQLException { ProcessBuilder processBuilder = new ProcessBuilder(context); - return processBuilder.create(context, ePerson, scriptName, parameters); + return processBuilder.create(context, ePerson, scriptName, parameters, null); + } + + public static ProcessBuilder createProcess(Context context, EPerson ePerson, String scriptName, + List parameters, + Set specialGroups) + throws SQLException { + ProcessBuilder processBuilder = new ProcessBuilder(context); + return processBuilder.create(context, ePerson, scriptName, parameters, 
specialGroups); } private ProcessBuilder create(Context context, EPerson ePerson, String scriptName, - List parameters) + List parameters, final Set specialGroups) throws SQLException { this.context = context; - this.process = processService.create(context, ePerson, scriptName, parameters); + this.process = processService.create(context, ePerson, scriptName, parameters, specialGroups); this.process.setProcessStatus(ProcessStatus.SCHEDULED); return this; } @@ -50,6 +61,11 @@ public class ProcessBuilder extends AbstractBuilder { return this; } + public ProcessBuilder withCreationTime(Date creationTime) { + process.setCreationTime(creationTime); + return this; + } + public ProcessBuilder withStartAndEndTime(String startTime, String endTime) throws ParseException { SimpleDateFormat simpleDateFormat = new SimpleDateFormat("dd/MM/yyyy"); process.setStartTime(simpleDateFormat.parse(startTime)); @@ -60,6 +76,7 @@ public class ProcessBuilder extends AbstractBuilder { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup process = c.reloadEntity(process); diff --git a/dspace-api/src/test/java/org/dspace/builder/RelationshipBuilder.java b/dspace-api/src/test/java/org/dspace/builder/RelationshipBuilder.java index 8746033419..c8c5cf85bf 100644 --- a/dspace-api/src/test/java/org/dspace/builder/RelationshipBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/RelationshipBuilder.java @@ -39,6 +39,7 @@ public class RelationshipBuilder extends AbstractBuilder { - private static final Logger log = Logger.getLogger(VersionBuilder.class); + private static final Logger log = LogManager.getLogger(VersionBuilder.class); private Version version; @@ -85,6 +86,7 @@ public class VersionBuilder extends AbstractBuilder public void delete(Version version) throws Exception { try (Context context = new 
Context()) { context.turnOffAuthorisationSystem(); + context.setDispatcher("noindex"); Version attachedTab = context.reloadEntity(version); if (attachedTab != null) { getService().delete(context, attachedTab); diff --git a/dspace-api/src/test/java/org/dspace/builder/WorkflowItemBuilder.java b/dspace-api/src/test/java/org/dspace/builder/WorkflowItemBuilder.java index b4a3b930fb..e06819d0ca 100644 --- a/dspace-api/src/test/java/org/dspace/builder/WorkflowItemBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/WorkflowItemBuilder.java @@ -116,6 +116,7 @@ public class WorkflowItemBuilder extends AbstractBuilder()); + map.put(OrcidHistoryBuilder.class.getName(), new ArrayList<>()); + map.put(OrcidTokenBuilder.class.getName(), new ArrayList<>()); map.put(ResourcePolicyBuilder.class.getName(), new ArrayList<>()); map.put(RelationshipBuilder.class.getName(), new ArrayList<>()); map.put(RequestItemBuilder.class.getName(), new ArrayList<>()); diff --git a/dspace-api/src/test/java/org/dspace/content/ItemTest.java b/dspace-api/src/test/java/org/dspace/content/ItemTest.java index 6af1cd5e02..15e425e23a 100644 --- a/dspace-api/src/test/java/org/dspace/content/ItemTest.java +++ b/dspace-api/src/test/java/org/dspace/content/ItemTest.java @@ -19,6 +19,8 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.io.File; @@ -41,6 +43,7 @@ import org.dspace.authorize.factory.AuthorizeServiceFactory; import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.BitstreamFormatService; +import org.dspace.content.service.ItemService; import org.dspace.content.service.MetadataFieldService; import 
org.dspace.content.service.MetadataSchemaService; import org.dspace.core.Constants; @@ -679,7 +682,7 @@ public class ItemTest extends AbstractDSpaceObjectTest { String schema = "dc"; String element = "contributor"; - String qualifier = "author"; + String qualifier = "editor"; String lang = Item.ANY; String values = "value0"; String authorities = "auth0"; @@ -1410,6 +1413,27 @@ public class ItemTest extends AbstractDSpaceObjectTest { assertThat("testMove 1", it.getOwningCollection(), equalTo(to)); } + /** + * Test of move method, of class Item, where both Collections are the same. + */ + @Test + public void testMoveSameCollection() throws Exception { + context.turnOffAuthorisationSystem(); + while (it.getCollections().size() > 1) { + it.removeCollection(it.getCollections().get(0)); + } + + Collection collection = it.getCollections().get(0); + it.setOwningCollection(collection); + ItemService itemServiceSpy = spy(itemService); + + itemService.move(context, it, collection, collection); + context.restoreAuthSystemState(); + assertThat("testMoveSameCollection 0", it.getOwningCollection(), notNullValue()); + assertThat("testMoveSameCollection 1", it.getOwningCollection(), equalTo(collection)); + verify(itemServiceSpy, times(0)).delete(context, it); + } + /** * Test of hasUploadedFiles method, of class Item. 
*/ diff --git a/dspace-api/src/test/java/org/dspace/content/LeftTiltedRelationshipMetadataServiceIT.java b/dspace-api/src/test/java/org/dspace/content/LeftTiltedRelationshipMetadataServiceIT.java index 4aa0677bc5..1ba2bc73a5 100644 --- a/dspace-api/src/test/java/org/dspace/content/LeftTiltedRelationshipMetadataServiceIT.java +++ b/dspace-api/src/test/java/org/dspace/content/LeftTiltedRelationshipMetadataServiceIT.java @@ -8,6 +8,7 @@ package org.dspace.content; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; import java.util.List; @@ -71,19 +72,28 @@ public class LeftTiltedRelationshipMetadataServiceIT extends RelationshipMetadat //request the virtual metadata of the publication only List leftList = relationshipMetadataService .getRelationshipMetadata(leftItem, true); - assertThat(leftList.size(), equalTo(2)); - assertThat(leftList.get(0).getValue(), equalTo("familyName, firstName")); - assertThat(leftList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo("dc")); - assertThat(leftList.get(0).getMetadataField().getElement(), equalTo("contributor")); - assertThat(leftList.get(0).getMetadataField().getQualifier(), equalTo("author")); + assertThat(leftList.size(), equalTo(3)); + + assertThat(leftList.get(0).getValue(), equalTo(String.valueOf(rightItem.getID()))); + assertThat(leftList.get(0).getMetadataField().getMetadataSchema().getName(), + equalTo(MetadataSchemaEnum.RELATION.getName())); + assertThat(leftList.get(0).getMetadataField().getElement(), equalTo("isAuthorOfPublication")); + assertThat(leftList.get(0).getMetadataField().getQualifier(), equalTo("latestForDiscovery")); assertThat(leftList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); - assertThat(leftList.get(1).getValue(), equalTo(String.valueOf(rightItem.getID()))); - assertThat(leftList.get(1).getMetadataField().getMetadataSchema().getName(), - 
equalTo(MetadataSchemaEnum.RELATION.getName())); - assertThat(leftList.get(1).getMetadataField().getElement(), equalTo("isAuthorOfPublication")); + assertThat(leftList.get(1).getValue(), equalTo("familyName, firstName")); + assertThat(leftList.get(1).getMetadataField().getMetadataSchema().getName(), equalTo("dc")); + assertThat(leftList.get(1).getMetadataField().getElement(), equalTo("contributor")); + assertThat(leftList.get(1).getMetadataField().getQualifier(), equalTo("author")); assertThat(leftList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID())); + assertThat(leftList.get(2).getValue(), equalTo(String.valueOf(rightItem.getID()))); + assertThat(leftList.get(2).getMetadataField().getMetadataSchema().getName(), + equalTo(MetadataSchemaEnum.RELATION.getName())); + assertThat(leftList.get(2).getMetadataField().getElement(), equalTo("isAuthorOfPublication")); + assertThat(leftList.get(2).getMetadataField().getQualifier(), nullValue()); + assertThat(leftList.get(2).getAuthority(), equalTo("virtual::" + relationship.getID())); + // rightItem is the author List rightRelationshipMetadataList = itemService .getMetadata(rightItem, MetadataSchemaEnum.RELATION.getName(), "isPublicationOfAuthor", null, Item.ANY); diff --git a/dspace-api/src/test/java/org/dspace/content/RelationshipMetadataServiceIT.java b/dspace-api/src/test/java/org/dspace/content/RelationshipMetadataServiceIT.java index a1996a64fc..b0761946fe 100644 --- a/dspace-api/src/test/java/org/dspace/content/RelationshipMetadataServiceIT.java +++ b/dspace-api/src/test/java/org/dspace/content/RelationshipMetadataServiceIT.java @@ -8,6 +8,7 @@ package org.dspace.content; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; @@ -186,19 +187,28 @@ public class RelationshipMetadataServiceIT extends 
AbstractIntegrationTestWithDa //request the virtual metadata of the publication only List leftList = relationshipMetadataService .getRelationshipMetadata(leftItem, true); - assertThat(leftList.size(), equalTo(2)); - assertThat(leftList.get(0).getValue(), equalTo("familyName, firstName")); - assertThat(leftList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo("dc")); - assertThat(leftList.get(0).getMetadataField().getElement(), equalTo("contributor")); - assertThat(leftList.get(0).getMetadataField().getQualifier(), equalTo("author")); + assertThat(leftList.size(), equalTo(3)); + + assertThat(leftList.get(0).getValue(), equalTo(String.valueOf(rightItem.getID()))); + assertThat(leftList.get(0).getMetadataField().getMetadataSchema().getName(), + equalTo(MetadataSchemaEnum.RELATION.getName())); + assertThat(leftList.get(0).getMetadataField().getElement(), equalTo("isAuthorOfPublication")); + assertThat(leftList.get(0).getMetadataField().getQualifier(), equalTo("latestForDiscovery")); assertThat(leftList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); - assertThat(leftList.get(1).getValue(), equalTo(String.valueOf(rightItem.getID()))); - assertThat(leftList.get(1).getMetadataField().getMetadataSchema().getName(), - equalTo(MetadataSchemaEnum.RELATION.getName())); - assertThat(leftList.get(1).getMetadataField().getElement(), equalTo("isAuthorOfPublication")); + assertThat(leftList.get(1).getValue(), equalTo("familyName, firstName")); + assertThat(leftList.get(1).getMetadataField().getMetadataSchema().getName(), equalTo("dc")); + assertThat(leftList.get(1).getMetadataField().getElement(), equalTo("contributor")); + assertThat(leftList.get(1).getMetadataField().getQualifier(), equalTo("author")); assertThat(leftList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID())); + assertThat(leftList.get(2).getValue(), equalTo(String.valueOf(rightItem.getID()))); + 
assertThat(leftList.get(2).getMetadataField().getMetadataSchema().getName(), + equalTo(MetadataSchemaEnum.RELATION.getName())); + assertThat(leftList.get(2).getMetadataField().getElement(), equalTo("isAuthorOfPublication")); + assertThat(leftList.get(2).getMetadataField().getQualifier(), nullValue()); + assertThat(leftList.get(2).getAuthority(), equalTo("virtual::" + relationship.getID())); + // rightItem is the author List rightRelationshipMetadataList = itemService .getMetadata(rightItem, MetadataSchemaEnum.RELATION.getName(), "isPublicationOfAuthor", null, Item.ANY); @@ -208,12 +218,21 @@ public class RelationshipMetadataServiceIT extends AbstractIntegrationTestWithDa //request the virtual metadata of the publication List rightList = relationshipMetadataService .getRelationshipMetadata(rightItem, true); - assertThat(rightList.size(), equalTo(1)); + assertThat(rightList.size(), equalTo(2)); + assertThat(rightList.get(0).getValue(), equalTo(String.valueOf(leftItem.getID()))); assertThat(rightList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo(MetadataSchemaEnum.RELATION.getName())); assertThat(rightList.get(0).getMetadataField().getElement(), equalTo("isPublicationOfAuthor")); + assertThat(rightList.get(0).getMetadataField().getQualifier(), equalTo("latestForDiscovery")); assertThat(rightList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); + + assertThat(rightList.get(1).getValue(), equalTo(String.valueOf(leftItem.getID()))); + assertThat(rightList.get(1).getMetadataField().getMetadataSchema().getName(), + equalTo(MetadataSchemaEnum.RELATION.getName())); + assertThat(rightList.get(1).getMetadataField().getElement(), equalTo("isPublicationOfAuthor")); + assertThat(rightList.get(1).getMetadataField().getQualifier(), nullValue()); + assertThat(rightList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID())); } @Test @@ -380,34 +399,52 @@ public class RelationshipMetadataServiceIT extends 
AbstractIntegrationTestWithDa //request the virtual metadata of the journal issue List issueRelList = relationshipMetadataService.getRelationshipMetadata(leftItem, true); - assertThat(issueRelList.size(), equalTo(2)); - assertThat(issueRelList.get(0).getValue(), equalTo("30")); - assertThat(issueRelList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo("publicationvolume")); - assertThat(issueRelList.get(0).getMetadataField().getElement(), equalTo("volumeNumber")); - assertThat(issueRelList.get(0).getMetadataField().getQualifier(), equalTo(null)); + assertThat(issueRelList.size(), equalTo(3)); + + assertThat(issueRelList.get(0).getValue(), equalTo(String.valueOf(rightItem.getID()))); + assertThat(issueRelList.get(0).getMetadataField().getMetadataSchema().getName(), + equalTo(MetadataSchemaEnum.RELATION.getName())); + assertThat(issueRelList.get(0).getMetadataField().getElement(), equalTo("isJournalVolumeOfIssue")); + assertThat(issueRelList.get(0).getMetadataField().getQualifier(), equalTo("latestForDiscovery")); assertThat(issueRelList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); - assertThat(issueRelList.get(1).getValue(), equalTo(String.valueOf(rightItem.getID()))); - assertThat(issueRelList.get(1).getMetadataField().getMetadataSchema().getName(), - equalTo(MetadataSchemaEnum.RELATION.getName())); - assertThat(issueRelList.get(1).getMetadataField().getElement(), equalTo("isJournalVolumeOfIssue")); + assertThat(issueRelList.get(1).getValue(), equalTo("30")); + assertThat(issueRelList.get(1).getMetadataField().getMetadataSchema().getName(), equalTo("publicationvolume")); + assertThat(issueRelList.get(1).getMetadataField().getElement(), equalTo("volumeNumber")); + assertThat(issueRelList.get(1).getMetadataField().getQualifier(), equalTo(null)); assertThat(issueRelList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID())); + assertThat(issueRelList.get(2).getValue(), equalTo(String.valueOf(rightItem.getID()))); + 
assertThat(issueRelList.get(2).getMetadataField().getMetadataSchema().getName(), + equalTo(MetadataSchemaEnum.RELATION.getName())); + assertThat(issueRelList.get(2).getMetadataField().getElement(), equalTo("isJournalVolumeOfIssue")); + assertThat(issueRelList.get(2).getMetadataField().getQualifier(), nullValue()); + assertThat(issueRelList.get(2).getAuthority(), equalTo("virtual::" + relationship.getID())); + //request the virtual metadata of the journal volume List volumeRelList = relationshipMetadataService.getRelationshipMetadata(rightItem, true); - assertThat(volumeRelList.size(), equalTo(2)); - assertThat(volumeRelList.get(0).getValue(), equalTo("2")); - assertThat(volumeRelList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo("publicationissue")); - assertThat(volumeRelList.get(0).getMetadataField().getElement(), equalTo("issueNumber")); - assertThat(volumeRelList.get(0).getMetadataField().getQualifier(), equalTo(null)); + assertThat(volumeRelList.size(), equalTo(3)); + + assertThat(volumeRelList.get(0).getValue(), equalTo(String.valueOf(leftItem.getID()))); + assertThat(volumeRelList.get(0).getMetadataField().getMetadataSchema().getName(), + equalTo(MetadataSchemaEnum.RELATION.getName())); + assertThat(volumeRelList.get(0).getMetadataField().getElement(), equalTo("isIssueOfJournalVolume")); + assertThat(volumeRelList.get(0).getMetadataField().getQualifier(), equalTo("latestForDiscovery")); assertThat(volumeRelList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); - assertThat(volumeRelList.get(1).getValue(), equalTo(String.valueOf(leftItem.getID()))); - assertThat(volumeRelList.get(1).getMetadataField().getMetadataSchema().getName(), - equalTo(MetadataSchemaEnum.RELATION.getName())); - assertThat(volumeRelList.get(1).getMetadataField().getElement(), equalTo("isIssueOfJournalVolume")); + assertThat(volumeRelList.get(1).getValue(), equalTo("2")); + 
assertThat(volumeRelList.get(1).getMetadataField().getMetadataSchema().getName(), equalTo("publicationissue")); + assertThat(volumeRelList.get(1).getMetadataField().getElement(), equalTo("issueNumber")); + assertThat(volumeRelList.get(1).getMetadataField().getQualifier(), equalTo(null)); assertThat(volumeRelList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID())); + + assertThat(volumeRelList.get(2).getValue(), equalTo(String.valueOf(leftItem.getID()))); + assertThat(volumeRelList.get(2).getMetadataField().getMetadataSchema().getName(), + equalTo(MetadataSchemaEnum.RELATION.getName())); + assertThat(volumeRelList.get(2).getMetadataField().getElement(), equalTo("isIssueOfJournalVolume")); + assertThat(volumeRelList.get(2).getMetadataField().getQualifier(), nullValue()); + assertThat(volumeRelList.get(2).getAuthority(), equalTo("virtual::" + relationship.getID())); } @Test @@ -614,45 +651,6 @@ public class RelationshipMetadataServiceIT extends AbstractIntegrationTestWithDa .size(), equalTo(1)); } - @Test - public void testGetNextRightPlace() throws Exception { - assertThat(relationshipService.findNextRightPlaceByRightItem(context, rightItem), equalTo(0)); - initPublicationAuthor(); - - assertThat(relationshipService.findNextRightPlaceByRightItem(context, rightItem), equalTo(1)); - - context.turnOffAuthorisationSystem(); - - Item secondItem = ItemBuilder.createItem(context, col).build(); - RelationshipBuilder.createRelationshipBuilder(context, secondItem, rightItem, - isAuthorOfPublicationRelationshipType).build(); - context.restoreAuthSystemState(); - - assertThat(relationshipService.findNextRightPlaceByRightItem(context, rightItem), equalTo(2)); - } - - @Test - public void testGetNextLeftPlace() throws Exception { - assertThat(relationshipService.findNextLeftPlaceByLeftItem(context, leftItem), equalTo(0)); - initPublicationAuthor(); - - assertThat(relationshipService.findNextLeftPlaceByLeftItem(context, leftItem), equalTo(1)); - - 
context.turnOffAuthorisationSystem(); - - Item secondAuthor = ItemBuilder.createItem(context, col2) - .withPersonIdentifierFirstName("firstName") - .withPersonIdentifierLastName("familyName").build(); - - RelationshipBuilder.createRelationshipBuilder(context, leftItem, secondAuthor, - isAuthorOfPublicationRelationshipType).build(); - context.restoreAuthSystemState(); - - assertThat(relationshipService.findNextLeftPlaceByLeftItem(context, leftItem), equalTo(2)); - - - } - @Test public void testGetVirtualMetadata() throws SQLException, AuthorizeException { // Journal, JournalVolume, JournalIssue, Publication items, related to each other using the relationship types diff --git a/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplPlaceTest.java b/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplPlaceTest.java index 305de076a2..3e36f77c68 100644 --- a/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplPlaceTest.java +++ b/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplPlaceTest.java @@ -9,11 +9,15 @@ package org.dspace.content; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import java.sql.SQLException; +import java.util.Arrays; import java.util.List; +import java.util.stream.Collectors; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.AbstractUnitTest; import org.dspace.authorize.AuthorizeException; @@ -27,6 +31,7 @@ import org.dspace.content.service.MetadataValueService; import org.dspace.content.service.RelationshipService; import org.dspace.content.service.RelationshipTypeService; import org.dspace.content.service.WorkspaceItemService; +import org.dspace.core.Constants; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -39,6 +44,8 @@ public class RelationshipServiceImplPlaceTest 
extends AbstractUnitTest { protected RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); protected RelationshipTypeService relationshipTypeService = ContentServiceFactory.getInstance() .getRelationshipTypeService(); + protected RelationshipMetadataService relationshipMetadataService = + ContentServiceFactory.getInstance().getRelationshipMetadataService(); protected EntityTypeService entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService(); protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); @@ -52,9 +59,33 @@ public class RelationshipServiceImplPlaceTest extends AbstractUnitTest { Item item; Item authorItem; + + Item author1; + Item author2; + Item author3; + Item author4; + Item author5; + Item author6; + Item publication1; + Item publication2; + Item publication3; + Item publication4; + Item publication5; + Item publication6; + Item project1; + Item project2; + Item project3; + Item project4; + Item project5; + Item project6; + RelationshipType isAuthorOfPublication; + RelationshipType isProjectOfPublication; + RelationshipType isProjectOfPerson; + EntityType publicationEntityType; - EntityType authorEntityType; + EntityType projectEntityType; + EntityType personEntityType; String authorQualifier = "author"; String contributorElement = "contributor"; @@ -84,12 +115,120 @@ public class RelationshipServiceImplPlaceTest extends AbstractUnitTest { itemService.addMetadata(context, authorItem, "person", "familyName", null, null, "familyName"); itemService.addMetadata(context, authorItem, "person", "givenName", null, null, "firstName"); + WorkspaceItem wi; + + wi = workspaceItemService.create(context, col, false); + author1 = installItemService.installItem(context, wi); + itemService.addMetadata(context, author1, "dspace", "entity", 
"type", null, "Person"); + itemService.addMetadata(context, author1, "person", "familyName", null, null, "Author"); + itemService.addMetadata(context, author1, "person", "givenName", null, null, "First"); + + wi = workspaceItemService.create(context, col, false); + author2 = installItemService.installItem(context, wi); + itemService.addMetadata(context, author2, "dspace", "entity", "type", null, "Person"); + itemService.addMetadata(context, author2, "person", "familyName", null, null, "Author"); + itemService.addMetadata(context, author2, "person", "givenName", null, null, "Second"); + + wi = workspaceItemService.create(context, col, false); + author3 = installItemService.installItem(context, wi); + itemService.addMetadata(context, author3, "dspace", "entity", "type", null, "Person"); + itemService.addMetadata(context, author3, "person", "familyName", null, null, "Author"); + itemService.addMetadata(context, author3, "person", "givenName", null, null, "Third"); + + wi = workspaceItemService.create(context, col, false); + author4 = installItemService.installItem(context, wi); + itemService.addMetadata(context, author4, "dspace", "entity", "type", null, "Person"); + itemService.addMetadata(context, author4, "person", "familyName", null, null, "Author"); + itemService.addMetadata(context, author4, "person", "givenName", null, null, "Fourth"); + + wi = workspaceItemService.create(context, col, false); + author5 = installItemService.installItem(context, wi); + itemService.addMetadata(context, author5, "dspace", "entity", "type", null, "Person"); + itemService.addMetadata(context, author5, "person", "familyName", null, null, "Author"); + itemService.addMetadata(context, author5, "person", "givenName", null, null, "Fifth"); + + wi = workspaceItemService.create(context, col, false); + author6 = installItemService.installItem(context, wi); + itemService.addMetadata(context, author6, "dspace", "entity", "type", null, "Person"); + itemService.addMetadata(context, author6, 
"person", "familyName", null, null, "Author"); + itemService.addMetadata(context, author6, "person", "givenName", null, null, "Sixth"); + + wi = workspaceItemService.create(context, col, false); + publication1 = installItemService.installItem(context, wi); + itemService.addMetadata(context, publication1, "dspace", "entity", "type", null, "Publication"); + itemService.addMetadata(context, publication1, "dc", "title", null, null, "Publication 1"); + + wi = workspaceItemService.create(context, col, false); + publication2 = installItemService.installItem(context, wi); + itemService.addMetadata(context, publication2, "dspace", "entity", "type", null, "Publication"); + itemService.addMetadata(context, publication2, "dc", "title", null, null, "Publication 2"); + + wi = workspaceItemService.create(context, col, false); + publication3 = installItemService.installItem(context, wi); + itemService.addMetadata(context, publication3, "dspace", "entity", "type", null, "Publication"); + itemService.addMetadata(context, publication3, "dc", "title", null, null, "Publication 3"); + + wi = workspaceItemService.create(context, col, false); + publication4 = installItemService.installItem(context, wi); + itemService.addMetadata(context, publication4, "dspace", "entity", "type", null, "Publication"); + itemService.addMetadata(context, publication4, "dc", "title", null, null, "Publication 4"); + + wi = workspaceItemService.create(context, col, false); + publication5 = installItemService.installItem(context, wi); + itemService.addMetadata(context, publication5, "dspace", "entity", "type", null, "Publication"); + itemService.addMetadata(context, publication5, "dc", "title", null, null, "Publication 5"); + + wi = workspaceItemService.create(context, col, false); + publication6 = installItemService.installItem(context, wi); + itemService.addMetadata(context, publication6, "dspace", "entity", "type", null, "Publication"); + itemService.addMetadata(context, publication6, "dc", "title", null, 
null, "Publication 6"); + + wi = workspaceItemService.create(context, col, false); + project1 = installItemService.installItem(context, wi); + itemService.addMetadata(context, project1, "dspace", "entity", "type", null, "Project"); + itemService.addMetadata(context, project1, "dc", "title", null, null, "Project 1"); + + wi = workspaceItemService.create(context, col, false); + project2 = installItemService.installItem(context, wi); + itemService.addMetadata(context, project2, "dspace", "entity", "type", null, "Project"); + itemService.addMetadata(context, project2, "dc", "title", null, null, "Project 2"); + + wi = workspaceItemService.create(context, col, false); + project3 = installItemService.installItem(context, wi); + itemService.addMetadata(context, project3, "dspace", "entity", "type", null, "Project"); + itemService.addMetadata(context, project3, "dc", "title", null, null, "Project 3"); + + wi = workspaceItemService.create(context, col, false); + project4 = installItemService.installItem(context, wi); + itemService.addMetadata(context, project4, "dspace", "entity", "type", null, "Project"); + itemService.addMetadata(context, project4, "dc", "title", null, null, "Project 4"); + + wi = workspaceItemService.create(context, col, false); + project5 = installItemService.installItem(context, wi); + itemService.addMetadata(context, project5, "dspace", "entity", "type", null, "Project"); + itemService.addMetadata(context, project5, "dc", "title", null, null, "Project 5"); + + wi = workspaceItemService.create(context, col, false); + project6 = installItemService.installItem(context, wi); + itemService.addMetadata(context, project6, "dspace", "entity", "type", null, "Project"); + itemService.addMetadata(context, project6, "dc", "title", null, null, "Project 6"); + + publicationEntityType = entityTypeService.create(context, "Publication"); - authorEntityType = entityTypeService.create(context, "Person"); + projectEntityType = entityTypeService.create(context, "Project"); 
+ personEntityType = entityTypeService.create(context, "Person"); isAuthorOfPublication = relationshipTypeService - .create(context, publicationEntityType, authorEntityType, + .create(context, publicationEntityType, personEntityType, "isAuthorOfPublication", "isPublicationOfAuthor", null, null, null, null); + isProjectOfPublication = relationshipTypeService + .create(context, publicationEntityType, projectEntityType, + "isProjectOfPublication", "isPublicationOfProject", + null, null, null, null); + isProjectOfPerson = relationshipTypeService + .create(context, personEntityType, projectEntityType, + "isProjectOfPerson", "isPersonOfProject", + null, null, null, null); context.restoreAuthSystemState(); } catch (AuthorizeException ex) { @@ -226,7 +365,7 @@ public class RelationshipServiceImplPlaceTest extends AbstractUnitTest { itemService.addMetadata(context, secondAuthorItem, "person", "familyName", null, null, "familyNameTwo"); itemService.addMetadata(context, secondAuthorItem, "person", "givenName", null, null, "firstNameTwo"); Relationship relationshipTwo = relationshipService - .create(context, item, secondAuthorItem, isAuthorOfPublication, 5, -1); + .create(context, item, secondAuthorItem, isAuthorOfPublication, 1, -1); context.restoreAuthSystemState(); @@ -234,16 +373,19 @@ public class RelationshipServiceImplPlaceTest extends AbstractUnitTest { list = itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY); assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list.get(0)); - assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 1, list.get(1)); - assertMetadataValue(authorQualifier, contributorElement, dcSchema, "familyName, firstName", - "virtual::" + relationship.getID(), 2, list.get(2)); - assertThat(relationship.getLeftPlace(), equalTo(2)); - assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list.get(3)); - 
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 4, list.get(4)); assertMetadataValue(authorQualifier, contributorElement, dcSchema, "familyNameTwo, firstNameTwo", - "virtual::" + relationshipTwo.getID(), 5, list.get(5)); - assertThat(relationshipTwo.getLeftPlace(), equalTo(5)); + "virtual::" + relationshipTwo.getID(), 1, list.get(1)); + assertThat(relationshipTwo.getLeftPlace(), equalTo(1)); + + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 2, list.get(2)); + + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "familyName, firstName", + "virtual::" + relationship.getID(), 3, list.get(3)); + assertThat(relationship.getLeftPlace(), equalTo(3)); + + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 4, list.get(4)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 5, list.get(5)); } @@ -425,4 +567,2768 @@ public class RelationshipServiceImplPlaceTest extends AbstractUnitTest { } + /* RelationshipService#create */ + + @Test + public void createUseForPlaceRelationshipAppendingLeftNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r2, r3)); + } + + @Test + public void createUseForPlaceRelationshipWithLeftPlaceAtTheStartNoMetadataTest() throws Exception { + 
context.turnOffAuthorisationSystem(); + + // Add two Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + + // Add another Author @ leftPlace 0. The existing relationships should get pushed one place forward + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, 0, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r3, 0); + assertLeftPlace(r1, 1); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r3, r1, r2)); + } + + @Test + public void createUseForPlaceRelationshipWithLeftPlaceInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + + // Add another Author @ leftPlace 1. 
The second relationship should get pushed by one place + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, 1, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 1); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r3, r2)); + } + + @Test + public void createUseForPlaceRelationshipWithLeftPlaceAtTheEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + + // Add another Author @ leftPlace 2. This should have the same effect as just appending it + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, 2, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r2, r3)); + } + + @Test + public void createUseForPlaceRelationshipAppendingLeftWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add a dc.contributor.author MDV + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + + // Add two Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + + // Add another dc.contributor.author MDV + itemService.addMetadata(context, publication1, dcSchema, 
contributorElement, authorQualifier, null, "MDV 2"); + + // Add another Author to the same Publication, appending to the end + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 1); + assertLeftPlace(r2, 2); + assertLeftPlace(r3, 4); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r2, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "MDV 1", + "Author, First", + "Author, Second", + "MDV 2", + "Author, Third" + )); + } + + @Test + public void createUseForPlaceRelationshipWithLeftPlaceAtTheStartWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add a dc.contributor.author MDV + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + + // Add two Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + + // Add another dc.contributor.author MDV + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + + // Add another Author @ leftPlace 0. 
All MDVs & relationships after it should get pushed by one place + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, 0, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r3, 0); + assertLeftPlace(r1, 2); + assertLeftPlace(r2, 3); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r3, r1, r2)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, Third", + "MDV 1", + "Author, First", + "Author, Second", + "MDV 2" + )); + } + + @Test + public void createUseForPlaceRelationshipWithLeftPlaceInTheMiddleWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add a dc.contributor.author MDV + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + + // Add two Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + + // Add another dc.contributor.author MDV + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + + // Add another Author @ leftPlace 2. 
All MDVs & relationships after it should get pushed by one place + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, 2, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 1); + assertLeftPlace(r3, 2); + assertLeftPlace(r2, 3); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r3, r2)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "MDV 1", + "Author, First", + "Author, Third", + "Author, Second", + "MDV 2" + )); + } + + @Test + public void createUseForPlaceRelationshipWithLeftPlaceInTheMiddleWithMetadataTest_ignoreOtherRels( + ) throws Exception { + context.turnOffAuthorisationSystem(); + + // Add a dc.contributor.author MDV + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + + // Add two Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + // Add another dc.contributor.author MDV + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + + // NOTE: unrelated relationship => should not be affected + Relationship ur2 = relationshipService.create(context, author2, project2, isProjectOfPerson, -1, -1); + + // Add another Author @ leftPlace 2. 
All MDVs & relationships after it should get pushed by one place + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, 2, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 1); + assertLeftPlace(r3, 2); + assertLeftPlace(r2, 3); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r3, r2)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "MDV 1", + "Author, First", + "Author, Third", + "Author, Second", + "MDV 2" + )); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + assertLeftPlace(ur2, 0); + assertRightPlace(ur2, 0); + } + + @Test + public void createUseForPlaceRelationshipWithLeftPlaceAtTheEndWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add a dc.contributor.author MDV + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + + // Add two Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + + // Add another dc.contributor.author MDV + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + + // Add another Author @ leftPlace 4. 
This should have the same effect as just appending it + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, 4, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 1); + assertLeftPlace(r2, 2); + assertLeftPlace(r3, 4); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r2, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "MDV 1", + "Author, First", + "Author, Second", + "MDV 2", + "Author, Third" + )); + } + + @Test + public void createUseForPlaceRelationshipAppendingRightNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r2, 1); + assertRightPlace(r3, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r2, r3)); + } + + @Test + public void createUseForPlaceRelationshipWithRightPlaceAtTheStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + + // Add another Publication @ rightPlace 0. 
The existing relationships should get pushed one place forward + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, 0); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r3, 0); + assertRightPlace(r1, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r3, r1, r2)); + } + + @Test + public void createUseForPlaceRelationshipWithRightPlaceInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + + // Add another Publication @ rightPlace 1. The second relationship should get pushed by one place + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, 1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r3, r2)); + } + + @Test + public void createUseForPlaceRelationshipWithRightPlaceInTheMiddleNoMetadataTest_ignoreOtherRels( + ) throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + + // NOTE: unrelated relationship => should not be affected 
+ Relationship ur2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + + // Add another Publication @ rightPlace 1. The second relationship should get pushed by one place + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, 1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r3, r2)); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + assertLeftPlace(ur2, 1); + assertRightPlace(ur2, 0); + } + + @Test + public void createUseForPlaceRelationshipWithRightPlaceAtTheEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + + // Add another Publication @ rightPlace 2. 
This should have the same effect as just appending it + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, 2); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r2, 1); + assertRightPlace(r3, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r2, r3)); + } + + @Test + public void createNonUseForPlaceRelationshipAppendingLeftTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r2, r3)); + } + + @Test + public void createNonUseForPlaceRelationshipWithLeftPlaceAtTheStartTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + + // Add another Project @ leftPlace 0. 
The existing relationships should get pushed one place forward + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, 0, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r3, 0); + assertLeftPlace(r1, 1); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r3, r1, r2)); + } + + @Test + public void createNonUseForPlaceRelationshipWithLeftPlaceInTheMiddleTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + + // Add another Project @ leftPlace 1. The second relationship should get pushed by one place + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, 1, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 1); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r3, r2)); + } + + @Test + public void createNonUseForPlaceRelationshipWithLeftPlaceInTheMiddleTest_ignoreOtherRels() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + + // Add another Project @ leftPlace 1. 
The second relationship should get pushed by one place + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, 1, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 1); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r3, r2)); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + } + + @Test + public void createNonUseForPlaceRelationshipWithLeftPlaceAtTheEndTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + + // Add another Project @ leftPlace 2. This should have the same effect as just appending it + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, 2, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r2, r3)); + } + + @Test + public void createNonUseForPlaceRelationshipAppendingRightTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r2, 1); + assertRightPlace(r3, 2); + 
assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r2, r3)); + } + + @Test + public void createNonUseForPlaceRelationshipWithRightPlaceAtTheStartTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + + // Add another Author @ rightPlace 0. The existing relationships should get pushed one place forward + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, 0); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r3, 0); + assertRightPlace(r1, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r3, r1, r2)); + } + + @Test + public void createNonUseForPlaceRelationshipWithRightPlaceInTheMiddleTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + + // Add another Author @ rightPlace 1. 
The second relationship should get pushed by one place + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, 1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r3, r2)); + } + + @Test + public void createNonUseForPlaceRelationshipWithRightPlaceInTheMiddleTest_ignoreOtherRels() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + + // Add another Author @ rightPlace 1. The second relationship should get pushed by one place + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, 1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r3, r2)); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + } + + @Test + public void createNonUseForPlaceRelationshipWithRightPlaceAtTheEndTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + + // Add another Author @ rightPlace 2. 
This should have the same effect as just appending it + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, 2); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r2, 1); + assertRightPlace(r3, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r2, r3)); + } + + /* RelationshipService#move */ + + @Test + public void moveUseForPlaceRelationshipToCurrentLeftPlaceNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + relationshipService.move(context, r1, 0, null); + relationshipService.move(context, r2, 1, null); + relationshipService.move(context, r3, 2, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r2, r3)); + } + + @Test + public void moveUseForPlaceRelationshipToLeftPlaceAtTheStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + relationshipService.move(context, r3, 0, null); + + 
context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r3, 0); + assertLeftPlace(r1, 1); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r3, r1, r2)); + } + + @Test + public void moveUseForPlaceRelationshipUpToLeftPlaceInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Move the first Author to leftPlace=1 + relationshipService.move(context, r1, 1, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 0); + assertLeftPlace(r1, 1); + assertLeftPlace(r3, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r1, r3)); + } + + @Test + public void moveUseForPlaceRelationshipDownToLeftPlaceInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Move the last Author to leftPlace=1 + relationshipService.move(context, r3, 1, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 1); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, 
List.of(r1, r3, r2)); + } + + @Test + public void moveUseForPlaceRelationshipToLeftPlaceAtTheEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Move the first Author to the back + relationshipService.move(context, r1, -1, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 0); + assertLeftPlace(r3, 1); + assertLeftPlace(r1, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r3, r1)); + } + + @Test + public void moveUseForPlaceRelationshipToLeftPlaceAtTheEndOverlapNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Move the first Author to the back + relationshipService.move(context, r1, 2, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 0); + assertLeftPlace(r3, 1); + assertLeftPlace(r1, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r3, r1)); + } + + @Test + public void moveUseForPlaceRelationshipToCurrentLeftPlaceWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + 
itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + relationshipService.move(context, r1, 1, null); + relationshipService.move(context, r2, 2, null); + relationshipService.move(context, r3, 4, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 1); + assertLeftPlace(r2, 2); + assertLeftPlace(r3, 4); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r2, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "MDV 1", + "Author, First", + "Author, Second", + "MDV 2", + "Author, Third" + )); + } + + @Test + public void moveUseForPlaceRelationshipToLeftPlaceAtTheStartWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + relationshipService.move(context, r3, 0, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r3, 0); 
+ assertLeftPlace(r1, 2); + assertLeftPlace(r2, 3); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r3, r1, r2)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, Third", + "MDV 1", + "Author, First", + "Author, Second", + "MDV 2" + )); + } + + @Test + public void moveUseForPlaceRelationshipUpToLeftPlaceInTheMiddleWithTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Move the first Author to leftPlace=3 + relationshipService.move(context, r1, 3, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 1); + assertLeftPlace(r1, 3); + assertLeftPlace(r3, 4); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r1, r3)); + } + + @Test + public void moveUseForPlaceRelationshipUpToLeftPlaceInTheMiddleWithTest_ignoreOtherRels() throws Exception { + context.turnOffAuthorisationSystem(); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + // Initialize MDVs and Relationships + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = 
relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur2 = relationshipService.create(context, publication1, project2, isProjectOfPublication, -1, -1); + + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Move the first Author to leftPlace=3 + relationshipService.move(context, r1, 3, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 1); + assertLeftPlace(r1, 3); + assertLeftPlace(r3, 4); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r1, r3)); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + assertLeftPlace(ur2, 1); + assertRightPlace(ur2, 0); + } + + @Test + public void moveUseForPlaceRelationshipDownToLeftPlaceInTheMiddleWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Move the last Author to leftPlace=2 + relationshipService.move(context, r3, 2, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 1); + assertLeftPlace(r3, 2); + assertLeftPlace(r2, 3); + 
assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r3, r2)); + } + + @Test + public void moveUseForPlaceRelationshipDownToLeftPlaceInTheMiddleWithMetadataTest_ignoreOtherRels( + ) throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur2 = relationshipService.create(context, author2, project2, isProjectOfPerson, -1, -1); + + // Move the last Author to leftPlace=2 + relationshipService.move(context, r3, 2, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 1); + assertLeftPlace(r3, 2); + assertLeftPlace(r2, 3); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r3, r2)); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + assertLeftPlace(ur2, 0); + assertRightPlace(ur2, 0); + } + + @Test + public void moveUseForPlaceRelationshipToLeftPlaceAtTheEndWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + 
Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Move the first Author to the back + relationshipService.move(context, r1, -1, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 3); + assertLeftPlace(r1, 4); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r3, r1)); + } + + @Test + public void moveUseForPlaceRelationshipToLeftPlaceAtTheEndOverlapWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Move the first Author to the back + relationshipService.move(context, r1, 4, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 3); + assertLeftPlace(r1, 4); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r3, r1)); + } + + @Test + public void moveUseForPlaceRelationshipToCurrentRightPlaceNoMetadataTest() throws Exception { + 
context.turnOffAuthorisationSystem(); + + // Add three Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + + relationshipService.move(context, r1, null, 0); + relationshipService.move(context, r2, null, 1); + relationshipService.move(context, r3, null, 2); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r2, 1); + assertRightPlace(r3, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r2, r3)); + } + + @Test + public void moveUseForPlaceRelationshipToRightPlaceAtTheStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + + relationshipService.move(context, r3, null, 0); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r3, 0); + assertRightPlace(r1, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r3, r1, r2)); + } + + @Test + public void moveUseForPlaceRelationshipUpToRightPlaceInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, 
isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + // Move the first Publication to rightPlace=1 + relationshipService.move(context, r1, null, 1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r2, 0); + assertRightPlace(r1, 1); + assertRightPlace(r3, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r2, r1, r3)); + } + + @Test + public void moveUseForPlaceRelationshipDownToRightPlaceInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + // Move the last Publication to rightPlace=1 + relationshipService.move(context, r3, null, 1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r3, r2)); + } + + @Test + public void moveUseForPlaceRelationshipToRightPlaceAtTheEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1,
isAuthorOfPublication, -1, -1); + + // Move the first Publication to the back + relationshipService.move(context, r1, null, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r2, 0); + assertRightPlace(r3, 1); + assertRightPlace(r1, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r2, r3, r1)); + } + + @Test + public void moveUseForPlaceRelationshipToRightPlaceAtTheEndOverlapNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + // Move the first Publication to the back + relationshipService.move(context, r1, null, 2); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r2, 0); + assertRightPlace(r3, 1); + assertRightPlace(r1, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r2, r3, r1)); + } + + @Test + public void moveNonUseForPlaceRelationshipToCurrentLeftPlaceNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Move each Project to its current place (should be a no-op) + relationshipService.move(context, r1, 0, null); + relationshipService.move(context, r2, 1, null); + relationshipService.move(context, r3, 2, null); + + 
context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r2, r3)); + } + + @Test + public void moveNonUseForPlaceRelationshipToLeftPlaceAtTheStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Move the last Project to the front + relationshipService.move(context, r3, 0, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r3, 0); + assertLeftPlace(r1, 1); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r3, r1, r2)); + } + + @Test + public void moveNonUseForPlaceRelationshipUpToLeftPlaceInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Move the first Author to leftPlace=1 + relationshipService.move(context, r1, 1, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 0); + assertLeftPlace(r1, 1); + assertLeftPlace(r3, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r2, r1, r3)); + } + + @Test + public void 
moveNonUseForPlaceRelationshipDownToLeftPlaceInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Move the last Author to leftPlace=1 + relationshipService.move(context, r3, 1, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 1); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r3, r2)); + } + + @Test + public void moveNonUseForPlaceRelationshipToLeftPlaceAtTheEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Move the first Author to the back + relationshipService.move(context, r1, -1, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 0); + assertLeftPlace(r3, 1); + assertLeftPlace(r1, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r2, r3, r1)); + } + + @Test + public void moveNonUseForPlaceRelationshipToLeftPlaceAtTheEndOverlapNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, 
-1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Move the first Author to the back + relationshipService.move(context, r1, 2, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 0); + assertLeftPlace(r3, 1); + assertLeftPlace(r1, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r2, r3, r1)); + } + + @Test + public void moveNonUseForPlaceRelationshipToCurrentRightPlaceNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + + relationshipService.move(context, r1, null, 0); + relationshipService.move(context, r2, null, 1); + relationshipService.move(context, r3, null, 2); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r2, 1); + assertRightPlace(r3, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r2, r3)); + } + + @Test + public void moveNonUseForPlaceRelationshipToRightPlaceAtTheStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + + 
relationshipService.move(context, r3, null, 0); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r3, 0); + assertRightPlace(r1, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r3, r1, r2)); + } + + @Test + public void moveNonUseForPlaceRelationshipUpToRightPlaceInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Move the first Author to rightPlace=1 + relationshipService.move(context, r1, null, 1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r2, 0); + assertRightPlace(r1, 1); + assertRightPlace(r3, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r2, r1, r3)); + } + + @Test + public void moveNonUseForPlaceRelationshipUpToRightPlaceInTheMiddleNoMetadataTest_ignoreOtherRels( + ) throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur2 = relationshipService.create(context, publication2, project1, isProjectOfPublication, -1, -1); + + Relationship r3 = 
relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur3 = relationshipService.create(context, publication3, project1, isProjectOfPublication, -1, -1); + + // Move the first Author to rightPlace=1 + relationshipService.move(context, r1, null, 1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r2, 0); + assertRightPlace(r1, 1); + assertRightPlace(r3, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r2, r1, r3)); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + assertLeftPlace(ur2, 0); + assertRightPlace(ur2, 1); + assertLeftPlace(ur3, 0); + assertRightPlace(ur3, 2); + } + + @Test + public void moveNonUseForPlaceRelationshipDownToRightPlaceInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Move the last Author to rightPlace=1 + relationshipService.move(context, r3, null, 1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r3, r2)); + } + + @Test + public void moveNonUseForPlaceRelationshipDownToRightPlaceInTheMiddleNoMetadataTest_ignoreOtherRels( + ) throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); 
+ Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Move the last Author to rightPlace=1 + relationshipService.move(context, r3, null, 1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r3, r2)); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + } + + @Test + public void moveNonUseForPlaceRelationshipToRightPlaceAtTheEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Move the first Author to the back + relationshipService.move(context, r1, null, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r2, 0); + assertRightPlace(r3, 1); + assertRightPlace(r1, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r2, r3, r1)); + } + + /* RelationshipService#delete */ + + @Test + public void deleteUseForPlaceRelationshipFromLeftStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, 
isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Delete the first Author + relationshipService.delete(context, r1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 0); + assertLeftPlace(r3, 1); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r3)); + } + + @Test + public void deleteUseForPlaceRelationshipFromLeftMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Delete the second Author + relationshipService.delete(context, r2); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 1); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r3)); + } + + @Test + public void deleteUseForPlaceRelationshipFromLeftEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Delete the third Author + relationshipService.delete(context, r3); + + 
context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r2)); + } + + @Test + public void deleteUseForPlaceRelationshipFromLeftStartWithMetadataNoCopyTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + relationshipService.delete(context, r1, false, false); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 3); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "MDV 1", + "Author, Second", + "MDV 2", + "Author, Third" + )); + } + + @Test + public void deleteUseForPlaceRelationshipFromLeftStartWithMetadataCopyTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + 
Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + relationshipService.delete(context, r1, true, false); + + context.restoreAuthSystemState(); + + // Check relationship order + // NOTE: since R1 has been removed, but copied to left, this place remains at 2 (instead of 1) + assertLeftPlace(r2, 2); + // NOTE: since R1 has been removed, but copied to left, this place remains at 4 (instead of 3) + assertLeftPlace(r3, 4); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, Arrays.asList(null, r2, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, First", // this is no longer a relationship + "MDV 1", + "Author, Second", + "MDV 2", + "Author, Third" + )); + } + + @Test + public void deleteUseForPlaceRelationshipFromLeftMiddleWithMetadataNoCopyTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + relationshipService.delete(context, r2, false, false); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 3); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, Arrays.asList(r1, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, First", + "MDV 1", + "MDV 2", + "Author, Third" + )); + } + + @Test + public void 
deleteUseForPlaceRelationshipFromLeftMiddleWithMetadataNoCopyTest_ignoreOtherRels() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + relationshipService.delete(context, r2, false, false); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 3); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, Arrays.asList(r1, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, First", + "MDV 1", + "MDV 2", + "Author, Third" + )); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + } + + @Test + public void deleteUseForPlaceRelationshipFromLeftMiddleWithMetadataCopyTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, 
dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + relationshipService.delete(context, r2, true, false); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + // NOTE: since R2 has been removed, but copied to left, this place remains at 4 (instead of 3) + assertLeftPlace(r3, 4); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, Arrays.asList(r1, null, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, First", + "MDV 1", + "Author, Second", // this is no longer a relationship + "MDV 2", + "Author, Third" + )); + } + + @Test + public void deleteUseForPlaceRelationshipFromLeftMiddleWithMetadataCopyTest_ignoreOtherRels() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + + // NOTE: unrelated relationship => should not be affected + Relationship ur2 = relationshipService.create(context, publication1, project2, isProjectOfPublication, -1, -1); + + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur3 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, 
isAuthorOfPublication, -1, -1); + + relationshipService.delete(context, r2, true, false); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + // NOTE: since R2 has been removed, but copied to left, this place remains at 4 (instead of 3) + assertLeftPlace(r3, 4); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, Arrays.asList(r1, null, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, First", + "MDV 1", + "Author, Second", // this is no longer a relationship + "MDV 2", + "Author, Third" + )); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + assertLeftPlace(ur2, 1); + assertRightPlace(ur2, 0); + assertLeftPlace(ur3, 0); + assertRightPlace(ur3, 0); + } + + @Test + public void deleteUseForPlaceRelationshipFromLeftEndWithMetadataNoCopyTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + + relationshipService.delete(context, r3, false, false); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, Arrays.asList(r1, r2)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, First", + "MDV 1", + "Author, Second", + "MDV 2" + )); + } + + @Test + public void 
deleteUseForPlaceRelationshipFromLeftEndWithMetadataCopyTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + relationshipService.delete(context, r3, true, false); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, Arrays.asList(r1, r2, null)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, First", + "MDV 1", + "Author, Second", + "MDV 2", + "Author, Third" // this is no longer a relationship + )); + } + + @Test + public void deleteUseForPlaceRelationshipFromRightStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + // Delete the first Publication + relationshipService.delete(context, r1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r2, 0); + assertRightPlace(r3, 1); + assertRelationMetadataOrder(author1, 
isAuthorOfPublication, List.of(r2, r3)); + } + + @Test + public void deleteUseForPlaceRelationshipFromRightMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + // Delete the second Publication + relationshipService.delete(context, r2); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r3)); + } + + @Test + public void deleteUseForPlaceRelationshipFromRightEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + // Delete the third Publication + relationshipService.delete(context, r3); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r2, 1); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r2)); + } + + @Test + public void deleteNonUseForPlaceRelationshipFromLeftStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, 
isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Delete the first Author + relationshipService.delete(context, r1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 0); + assertLeftPlace(r3, 1); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r2, r3)); + } + + @Test + public void deleteNonUseForPlaceRelationshipFromLeftMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Delete the second Author + relationshipService.delete(context, r2); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 1); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r3)); + } + + @Test + public void deleteNonUseForPlaceRelationshipFromLeftEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Delete the third Author + relationshipService.delete(context, r3); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + 
assertLeftPlace(r2, 1); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r2)); + } + + @Test + public void deleteNonUseForPlaceRelationshipFromRightStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Delete the first Publication + relationshipService.delete(context, r1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r2, 0); + assertRightPlace(r3, 1); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r2, r3)); + } + + @Test + public void deleteNonUseForPlaceRelationshipFromRightMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Delete the second Publication + relationshipService.delete(context, r2); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r3)); + } + + @Test + public void deleteNonUseForPlaceRelationshipFromRightMiddleNoMetadataTest_ignoreOtherRels() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = 
relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Delete the second Publication + relationshipService.delete(context, r2); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r3)); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + } + + @Test + public void deleteNonUseForPlaceRelationshipFromRightEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Delete the third Publication + relationshipService.delete(context, r3); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r2, 1); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r2)); + } + + @Test + public void changeLeftItemInUseForPlaceRelationshipAtTheStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to publication1, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = 
relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Add three Authors to publication2, appending to the end + Relationship r4 = relationshipService.create(context, publication2, author4, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication2, author5, isAuthorOfPublication, -1, -1); + Relationship r6 = relationshipService.create(context, publication2, author6, isAuthorOfPublication, -1, -1); + + // Move r1 to publication 2 + relationshipService.move(context, r1, publication2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for publication1 + assertLeftPlace(r2, 0); // should both move down as the first Relationship was removed + assertLeftPlace(r3, 1); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r3)); + + // Check relationship order for publication2 + assertLeftPlace(r4, 0); // previous Relationships should stay where they were + assertLeftPlace(r5, 1); + assertLeftPlace(r6, 2); + assertLeftPlace(r1, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(publication2, isAuthorOfPublication, List.of(r4, r5, r6, r1)); + } + + @Test + public void changeLeftItemInUseForPlaceRelationshipInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to publication1, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Add three Authors to publication2, appending to the end + Relationship r4 = relationshipService.create(context, 
publication2, author4, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication2, author5, isAuthorOfPublication, -1, -1); + Relationship r6 = relationshipService.create(context, publication2, author6, isAuthorOfPublication, -1, -1); + + // Move r2 to publication 2 + relationshipService.move(context, r2, publication2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for publication1 + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 1); // should move down as the second Relationship was removed + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r3)); + + // Check relationship order for publication2 + assertLeftPlace(r4, 0); // previous Relationships should stay where they were + assertLeftPlace(r5, 1); + assertLeftPlace(r6, 2); + assertLeftPlace(r2, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(publication2, isAuthorOfPublication, List.of(r4, r5, r6, r2)); + } + + @Test + public void changeLeftItemInUseForPlaceRelationshipAtTheEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to publication1, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Add three Authors to publication2, appending to the end + Relationship r4 = relationshipService.create(context, publication2, author4, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication2, author5, isAuthorOfPublication, -1, -1); + Relationship r6 = relationshipService.create(context, publication2, author6, isAuthorOfPublication, -1, -1); + + // Move r3 to publication 2 + 
relationshipService.move(context, r3, publication2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for publication1 + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r2)); + + // Check relationship order for publication2 + assertLeftPlace(r4, 0); // previous Relationships should stay where they were + assertLeftPlace(r5, 1); + assertLeftPlace(r6, 2); + assertLeftPlace(r3, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(publication2, isAuthorOfPublication, List.of(r4, r5, r6, r3)); + } + + @Test + public void changeLeftItemInUseForPlaceRelationshipAtTheStartWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to publication1, with regular MDVs in between + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Add three Authors to publication2, with regular MDVs in between + itemService.addMetadata(context, publication2, dcSchema, contributorElement, authorQualifier, null, "MDV 3"); + Relationship r4 = relationshipService.create(context, publication2, author4, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication2, author5, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication2, dcSchema, contributorElement, authorQualifier, null, "MDV 4"); + Relationship r6 = relationshipService.create(context, 
publication2, author6, isAuthorOfPublication, -1, -1); + + // Move r1 to publication 2 + relationshipService.move(context, r1, publication2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for publication1 + assertLeftPlace(r2, 1); // should both move down as the first Relationship was removed + assertLeftPlace(r3, 3); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "MDV 1", + "Author, Second", + "MDV 2", + "Author, Third" + )); + + // Check relationship order for publication2 + assertLeftPlace(r4, 1); // previous Relationships should stay where they were + assertLeftPlace(r5, 2); + assertLeftPlace(r6, 4); + assertLeftPlace(r1, 5); // moved Relationship should be appended to the end + assertRelationMetadataOrder(publication2, isAuthorOfPublication, List.of(r4, r5, r6, r1)); + assertMetadataOrder(publication2, "dc.contributor.author", List.of( + "MDV 3", + "Author, Fourth", + "Author, Fifth", + "MDV 4", + "Author, Sixth", + "Author, First" + )); + } + + @Test + public void changeLeftItemInUseForPlaceRelationshipInTheMiddleWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to publication1, with regular MDVs in between + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Add three Authors to publication2, with regular MDVs in between + itemService.addMetadata(context, 
publication2, dcSchema, contributorElement, authorQualifier, null, "MDV 3"); + Relationship r4 = relationshipService.create(context, publication2, author4, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication2, author5, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication2, dcSchema, contributorElement, authorQualifier, null, "MDV 4"); + Relationship r6 = relationshipService.create(context, publication2, author6, isAuthorOfPublication, -1, -1); + + // Move r2 to publication 2 + relationshipService.move(context, r2, publication2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for publication1 + assertLeftPlace(r1, 0); // should both move down as the first Relationship was removed + assertLeftPlace(r3, 3); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, First", + "MDV 1", + "MDV 2", + "Author, Third" + )); + + // Check relationship order for publication2 + assertLeftPlace(r4, 1); // previous Relationships should stay where they were + assertLeftPlace(r5, 2); + assertLeftPlace(r6, 4); + assertLeftPlace(r2, 5); // moved Relationship should be appended to the end + assertRelationMetadataOrder(publication2, isAuthorOfPublication, List.of(r4, r5, r6, r2)); + assertMetadataOrder(publication2, "dc.contributor.author", List.of( + "MDV 3", + "Author, Fourth", + "Author, Fifth", + "MDV 4", + "Author, Sixth", + "Author, Second" + )); + } + + @Test + public void changeLeftItemInUseForPlaceRelationshipInTheMiddleWithMetadataTest_ignoreOtherRels() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to publication1, with regular MDVs in between + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, 
authorQualifier, null, "MDV 1"); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + + // NOTE: unrelated relationship => should not be affected + Relationship ur2 = relationshipService.create(context, publication1, project3, isProjectOfPublication, -1, -1); + + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur3 = relationshipService.create(context, publication2, project2, isProjectOfPublication, -1, -1); + + // Add three Authors to publication2, with regular MDVs in between + itemService.addMetadata(context, publication2, dcSchema, contributorElement, authorQualifier, null, "MDV 3"); + Relationship r4 = relationshipService.create(context, publication2, author4, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication2, author5, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur4 = relationshipService.create(context, publication2, project1, isProjectOfPublication, -1, -1); + + itemService.addMetadata(context, publication2, dcSchema, contributorElement, authorQualifier, null, "MDV 4"); + Relationship r6 = relationshipService.create(context, publication2, author6, isAuthorOfPublication, -1, -1); + + // Move r2 to publication 2 + relationshipService.move(context, r2, publication2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for publication1 + assertLeftPlace(r1, 0); // should both move down as the first Relationship was removed + assertLeftPlace(r3, 3); 
+ assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, First", + "MDV 1", + "MDV 2", + "Author, Third" + )); + + // Check relationship order for publication2 + assertLeftPlace(r4, 1); // previous Relationships should stay where they were + assertLeftPlace(r5, 2); + assertLeftPlace(r6, 4); + assertLeftPlace(r2, 5); // moved Relationship should be appended to the end + assertRelationMetadataOrder(publication2, isAuthorOfPublication, List.of(r4, r5, r6, r2)); + assertMetadataOrder(publication2, "dc.contributor.author", List.of( + "MDV 3", + "Author, Fourth", + "Author, Fifth", + "MDV 4", + "Author, Sixth", + "Author, Second" + )); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + assertLeftPlace(ur2, 1); + assertRightPlace(ur2, 0); + assertLeftPlace(ur3, 0); + assertRightPlace(ur3, 0); + assertLeftPlace(ur4, 1); + assertRightPlace(ur4, 1); + } + + @Test + public void changeLeftItemInUseForPlaceRelationshipAtTheEndWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to publication1, with regular MDVs in between + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Add three Authors to publication2, with regular MDVs in between + itemService.addMetadata(context, publication2, dcSchema, contributorElement, authorQualifier, null, "MDV 3"); + Relationship r4 = 
relationshipService.create(context, publication2, author4, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication2, author5, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication2, dcSchema, contributorElement, authorQualifier, null, "MDV 4"); + Relationship r6 = relationshipService.create(context, publication2, author6, isAuthorOfPublication, -1, -1); + + // Move r3 to publication 2 + relationshipService.move(context, r3, publication2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for publication1 + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r2)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, First", + "MDV 1", + "Author, Second", + "MDV 2" + )); + + // Check relationship order for publication2 + assertLeftPlace(r4, 1); // previous Relationships should stay where they were + assertLeftPlace(r5, 2); + assertLeftPlace(r6, 4); + assertLeftPlace(r3, 5); // moved Relationship should be appended to the end + assertRelationMetadataOrder(publication2, isAuthorOfPublication, List.of(r4, r5, r6, r3)); + assertMetadataOrder(publication2, "dc.contributor.author", List.of( + "MDV 3", + "Author, Fourth", + "Author, Fifth", + "MDV 4", + "Author, Sixth", + "Author, Third" + )); + } + + @Test + public void changeRightItemInUseForPlaceRelationshipAtTheStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to author1, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + // Add three Publications to author2, 
appending to the end + Relationship r4 = relationshipService.create(context, publication4, author2, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication5, author2, isAuthorOfPublication, -1, -1); + Relationship r6 = relationshipService.create(context, publication6, author2, isAuthorOfPublication, -1, -1); + + // Move r1 to author2 + relationshipService.move(context, r1, null, author2); + + context.restoreAuthSystemState(); + + // Check relationship order for author1 + assertRightPlace(r2, 0); // should both move down as the first Relationship was removed + assertRightPlace(r3, 1); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r2, r3)); + + // Check relationship order for author2 + assertRightPlace(r4, 0); // previous Relationships should stay where they were + assertRightPlace(r5, 1); + assertRightPlace(r6, 2); + assertRightPlace(r1, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(author2, isAuthorOfPublication, List.of(r4, r5, r6, r1)); + } + + @Test + public void changeRightItemInUseForPlaceRelationshipInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to author1, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + // Add three Publications to author2, appending to the end + Relationship r4 = relationshipService.create(context, publication4, author2, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication5, author2, isAuthorOfPublication, -1, -1); + Relationship r6 = relationshipService.create(context, publication6, author2, isAuthorOfPublication, 
-1, -1); + + // Move r2 to author2 + relationshipService.move(context, r2, null, author2); + + context.restoreAuthSystemState(); + + // Check relationship order for author1 + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); // should move down as the first Relationship was removed + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r3)); + + // Check relationship order for author2 + assertRightPlace(r4, 0); // previous Relationships should stay where they were + assertRightPlace(r5, 1); + assertRightPlace(r6, 2); + assertRightPlace(r2, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(author2, isAuthorOfPublication, List.of(r4, r5, r6, r2)); + } + + @Test + public void changeRightItemInUseForPlaceRelationshipInTheMiddleNoMetadataTest_ignoreOtherRels() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to author1, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + // Add three Publications to author2, appending to the end + Relationship r4 = relationshipService.create(context, publication4, author2, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication5, author2, isAuthorOfPublication, -1, -1); + Relationship r6 = relationshipService.create(context, publication6, author2, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur2 = relationshipService.create(context, author2, project1, isProjectOfPerson, 
-1, -1); + + // Move r2 to author2 + relationshipService.move(context, r2, null, author2); + + context.restoreAuthSystemState(); + + // Check relationship order for author1 + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); // should move down as the first Relationship was removed + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r3)); + + // Check relationship order for author2 + assertRightPlace(r4, 0); // previous Relationships should stay where they were + assertRightPlace(r5, 1); + assertRightPlace(r6, 2); + assertRightPlace(r2, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(author2, isAuthorOfPublication, List.of(r4, r5, r6, r2)); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + assertLeftPlace(ur2, 0); + assertRightPlace(ur2, 1); + } + + @Test + public void changeRightItemInUseForPlaceRelationshipAtTheEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to author1, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + // Add three Publications to author2, appending to the end + Relationship r4 = relationshipService.create(context, publication4, author2, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication5, author2, isAuthorOfPublication, -1, -1); + Relationship r6 = relationshipService.create(context, publication6, author2, isAuthorOfPublication, -1, -1); + + // Move r3 to author2 + relationshipService.move(context, r3, null, author2); + + context.restoreAuthSystemState(); + + // Check relationship order for author1 + assertRightPlace(r1, 0); + 
assertRightPlace(r2, 1); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r2)); + + // Check relationship order for author2 + assertRightPlace(r4, 0); // previous Relationships should stay where they were + assertRightPlace(r5, 1); + assertRightPlace(r6, 2); + assertRightPlace(r3, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(author2, isAuthorOfPublication, List.of(r4, r5, r6, r3)); + } + + @Test + public void changeLeftItemInNonUseForPlaceRelationshipAtTheStart() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to author1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Add three Projects to author2, appending to the end + Relationship r4 = relationshipService.create(context, author2, project4, isProjectOfPerson, -1, -1); + Relationship r5 = relationshipService.create(context, author2, project5, isProjectOfPerson, -1, -1); + Relationship r6 = relationshipService.create(context, author2, project6, isProjectOfPerson, -1, -1); + + // Move r1 to author2 + relationshipService.move(context, r1, author2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for author1 + assertLeftPlace(r2, 0); // should both move down as the first Relationship was removed + assertLeftPlace(r3, 1); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r2, r3)); + + // Check relationship order for author2 + assertLeftPlace(r4, 0); // previous Relationships should stay where they were + assertLeftPlace(r5, 1); + assertLeftPlace(r6, 2); + assertLeftPlace(r1, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(author2, 
isProjectOfPerson, List.of(r4, r5, r6, r1)); + } + + @Test + public void changeLeftItemInNonUseNonForPlaceRelationshipInTheMiddle() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to author1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Add three Projects to author2, appending to the end + Relationship r4 = relationshipService.create(context, author2, project4, isProjectOfPerson, -1, -1); + Relationship r5 = relationshipService.create(context, author2, project5, isProjectOfPerson, -1, -1); + Relationship r6 = relationshipService.create(context, author2, project6, isProjectOfPerson, -1, -1); + + // Move r2 to author2 + relationshipService.move(context, r2, author2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for author1 + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 1); // should move down as the second Relationship was removed + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r3)); + + // Check relationship order for author2 + assertLeftPlace(r4, 0); // previous Relationships should stay where they were + assertLeftPlace(r5, 1); + assertLeftPlace(r6, 2); + assertLeftPlace(r2, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(author2, isProjectOfPerson, List.of(r4, r5, r6, r2)); + } + + @Test + public void changeLeftItemInNonUseForPlaceRelationshipAtTheEnd() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to author1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, 
isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Add three Projects to author2, appending to the end + Relationship r4 = relationshipService.create(context, author2, project4, isProjectOfPerson, -1, -1); + Relationship r5 = relationshipService.create(context, author2, project5, isProjectOfPerson, -1, -1); + Relationship r6 = relationshipService.create(context, author2, project6, isProjectOfPerson, -1, -1); + + // Move r3 to author2 + relationshipService.move(context, r3, author2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for author1 + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r2)); + + // Check relationship order for author2 + assertLeftPlace(r4, 0); // previous Relationships should stay where they were + assertLeftPlace(r5, 1); + assertLeftPlace(r6, 2); + assertLeftPlace(r3, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(author2, isProjectOfPerson, List.of(r4, r5, r6, r3)); + } + + @Test + public void changeRightItemInUseNonForPlaceRelationshipAtTheStartTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to project1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Add three Authors to project2, appending to the end + Relationship r4 = relationshipService.create(context, author4, project2, isProjectOfPerson, -1, -1); + Relationship r5 = relationshipService.create(context, author5, project2, isProjectOfPerson, -1, -1); + Relationship r6 = relationshipService.create(context, author6, project2, 
isProjectOfPerson, -1, -1); + + // Move r1 to project2 + relationshipService.move(context, r1, null, project2); + + context.restoreAuthSystemState(); + + // Check relationship order for project1 + assertRightPlace(r2, 0); // should both move down as the first Relationship was removed + assertRightPlace(r3, 1); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r2, r3)); + + // Check relationship order for project2 + assertRightPlace(r4, 0); // previous Relationships should stay where they were + assertRightPlace(r5, 1); + assertRightPlace(r6, 2); + assertRightPlace(r1, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(project2, isProjectOfPerson, List.of(r4, r5, r6, r1)); + } + + @Test + public void changeRightItemInNonUseForPlaceRelationshipInTheMiddleTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to project1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Add three Authors to project2, appending to the end + Relationship r4 = relationshipService.create(context, author4, project2, isProjectOfPerson, -1, -1); + Relationship r5 = relationshipService.create(context, author5, project2, isProjectOfPerson, -1, -1); + Relationship r6 = relationshipService.create(context, author6, project2, isProjectOfPerson, -1, -1); + + // Move r2 to project2 + relationshipService.move(context, r2, null, project2); + + context.restoreAuthSystemState(); + + // Check relationship order for project1 + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); // should move down as the first Relationship was removed + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r3)); + + // Check relationship 
order for project2 + assertRightPlace(r4, 0); // previous Relationships should stay where they were + assertRightPlace(r5, 1); + assertRightPlace(r6, 2); + assertRightPlace(r2, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(project2, isProjectOfPerson, List.of(r4, r5, r6, r2)); + } + + @Test + public void changeRightItemInNonUseForPlaceRelationshipInTheMiddleTest_ignoreOtherRels() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to project1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur2 = relationshipService.create(context, publication2, project1, isProjectOfPublication, -1, -1); + + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Add three Authors to project2, appending to the end + Relationship r4 = relationshipService.create(context, author4, project2, isProjectOfPerson, -1, -1); + Relationship r5 = relationshipService.create(context, author5, project2, isProjectOfPerson, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur3 = relationshipService.create(context, author5, project3, isProjectOfPerson, -1, -1); + + Relationship r6 = relationshipService.create(context, author6, project2, isProjectOfPerson, -1, -1); + + // Move r2 to project2 + relationshipService.move(context, r2, null, project2); + + context.restoreAuthSystemState(); + + // Check relationship order for project1 + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); // should move down as the first 
Relationship was removed + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r3)); + + // Check relationship order for project2 + assertRightPlace(r4, 0); // previous Relationships should stay where they were + assertRightPlace(r5, 1); + assertRightPlace(r6, 2); + assertRightPlace(r2, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(project2, isProjectOfPerson, List.of(r4, r5, r6, r2)); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + assertLeftPlace(ur2, 0); + assertRightPlace(ur2, 1); + assertLeftPlace(ur3, 1); + assertRightPlace(ur3, 0); + } + + @Test + public void changeRightItemInNonUseForPlaceRelationshipAtTheEndTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to project1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Add three Authors to project2, appending to the end + Relationship r4 = relationshipService.create(context, author4, project2, isProjectOfPerson, -1, -1); + Relationship r5 = relationshipService.create(context, author5, project2, isProjectOfPerson, -1, -1); + Relationship r6 = relationshipService.create(context, author6, project2, isProjectOfPerson, -1, -1); + + // Move r3 to project2 + relationshipService.move(context, r3, null, project2); + + context.restoreAuthSystemState(); + + // Check relationship order for project1 + assertRightPlace(r1, 0); + assertRightPlace(r2, 1); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r2)); + + // Check relationship order for project2 + assertRightPlace(r4, 0); // previous Relationships should stay where they were + assertRightPlace(r5, 1); + 
assertRightPlace(r6, 2); + assertRightPlace(r3, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(project2, isProjectOfPerson, List.of(r4, r5, r6, r3)); + } + + @Test + public void changeLeftItemInNonUseForPlaceRelationshipAtTheStartToSameItemNoChanges() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to author1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Move r1 to author1 + relationshipService.move(context, r1, author1, null); + + context.restoreAuthSystemState(); + + // Check relationship order for author1 -> should remain unchanged + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r2, r3)); + } + + @Test + public void changeRightItemInNonUseForPlaceRelationshipAtTheStartToSameItemNoChanges() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to author1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Move r1 to project1 + relationshipService.move(context, r1, null, project1); + + context.restoreAuthSystemState(); + + // Check relationship order for author1 -> should remain unchanged + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r2, r3)); + } + + @Test + public void 
changeLeftItemInNonUseForPlaceRelationshipAtTheStartWithSiblingsInOldLeftItem() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to author1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Add three Authors to project1, appending to the end + Relationship r4 = relationshipService.create(context, author4, project1, isProjectOfPerson, -1, -1); + Relationship r5 = relationshipService.create(context, author5, project1, isProjectOfPerson, -1, -1); + Relationship r6 = relationshipService.create(context, author6, project1, isProjectOfPerson, -1, -1); + + // Move r1 to author2 + relationshipService.move(context, r1, author2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for author1 -> should shift down by one + assertLeftPlace(r2, 0); + assertLeftPlace(r3, 1); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r2, r3)); + + // Check relationship order for project 1 -> should remain unchanged + assertRightPlace(r1, 0); + assertRightPlace(r4, 1); + assertRightPlace(r5, 2); + assertRightPlace(r6, 3); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r4, r5, r6)); + } + + @Test + public void changeRightItemInNonUseForPlaceRelationshipAtTheStartWithSiblingsInOldRightItem() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to project1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + 
+ // Add three Projects to author1, appending to the end + Relationship r4 = relationshipService.create(context, author1, project4, isProjectOfPerson, -1, -1); + Relationship r5 = relationshipService.create(context, author1, project5, isProjectOfPerson, -1, -1); + Relationship r6 = relationshipService.create(context, author1, project6, isProjectOfPerson, -1, -1); + + // Move r1 to project2 + relationshipService.move(context, r1, null, project2); + + context.restoreAuthSystemState(); + + // Check relationship order for project1 -> r2 and r3 should shift down by one + assertRightPlace(r2, 0); + assertRightPlace(r3, 1); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r2, r3)); + + // Check relationship order for author1 -> should remain unchanged + assertLeftPlace(r1, 0); + assertLeftPlace(r4, 1); + assertLeftPlace(r5, 2); + assertLeftPlace(r6, 3); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r4, r5, r6)); + } + + + private void assertLeftPlace(Relationship relationship, int leftPlace) { + assertEquals(leftPlace, relationship.getLeftPlace()); + } + + private void assertRightPlace(Relationship relationship, int rightPlace) { + assertEquals(rightPlace, relationship.getRightPlace()); + } + + + private void assertRelationMetadataOrder( + Item item, RelationshipType relationshipType, List relationships + ) { + String element = getRelationshipTypeStringForEntity(relationshipType, item); + List mdvs = itemService.getMetadata( + item, + MetadataSchemaEnum.RELATION.getName(), element, null, + Item.ANY + ); + + assertEquals( + "Metadata authorities should match relationship IDs", + relationships.stream() + .map(r -> { + if (r != null) { + return Constants.VIRTUAL_AUTHORITY_PREFIX + r.getID(); + } else { + return null; // To match relationships that have been deleted and copied to MDVs + } + }) + .collect(Collectors.toList()), + mdvs.stream() + .map(MetadataValue::getAuthority) + .collect(Collectors.toList()) + ); + } + + private void 
assertMetadataOrder( + Item item, String metadataField, List metadataValues + ) { + List mdvs = itemService.getMetadataByMetadataString(item, metadataField); + + assertEquals( + metadataValues, + mdvs.stream() + .map(MetadataValue::getValue) + .collect(Collectors.toList()) + ); + } + + private String getRelationshipTypeStringForEntity(RelationshipType relationshipType, Item item) { + String entityType = itemService.getEntityTypeLabel(item); + + if (StringUtils.equals(entityType, relationshipType.getLeftType().getLabel())) { + return relationshipType.getLeftwardType(); + } else if (StringUtils.equals(entityType, relationshipType.getRightType().getLabel())) { + return relationshipType.getRightwardType(); + } else { + throw new IllegalArgumentException( + entityType + " is not a valid entity for " + relationshipType.getLeftwardType() + ", must be either " + + relationshipType.getLeftType().getLabel() + " or " + relationshipType.getRightType().getLabel() + ); + } + } } diff --git a/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplTest.java b/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplTest.java index 5d6197e494..579e05b3de 100644 --- a/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplTest.java +++ b/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplTest.java @@ -24,12 +24,14 @@ import org.dspace.content.virtual.VirtualMetadataPopulator; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.services.ConfigurationService; +import org.dspace.versioning.utils.RelationshipVersioningUtils; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; +import org.mockito.Spy; import org.mockito.junit.MockitoJUnitRunner; @RunWith(MockitoJUnitRunner.class) @@ -71,6 +73,9 @@ public class RelationshipServiceImplTest { @Mock private ConfigurationService configurationService; 
+ @Spy + private RelationshipVersioningUtils relationshipVersioningUtils; + @Before public void init() { relationshipsList = new ArrayList<>(); @@ -112,9 +117,6 @@ public class RelationshipServiceImplTest { relationshipTest.add(getRelationship(bob, cindy, hasMother,1,0)); when(relationshipService.findByItem(context, cindy, -1, -1, false)).thenReturn(relationshipTest); - // Mock the state of objects utilized in findByItem() to meet the success criteria of the invocation - when(relationshipDAO.findByItem(context, cindy, -1, -1, false)).thenReturn(relationshipTest); - List results = relationshipService.findByItem(context, cindy); assertEquals("TestFindByItem 0", relationshipTest, results); for (int i = 0; i < relationshipTest.size(); i++) { @@ -122,32 +124,6 @@ public class RelationshipServiceImplTest { } } - @Test - public void testFindLeftPlaceByLeftItem() throws Exception { - // Declare objects utilized in unit test - Item item = mock(Item.class); - - // Mock DAO to return mocked left place as 0 - when(relationshipDAO.findNextLeftPlaceByLeftItem(context, item)).thenReturn(0); - - // The left place reported from out mocked item should match the DAO's report of the left place - assertEquals("TestFindLeftPlaceByLeftItem 0", relationshipDAO.findNextLeftPlaceByLeftItem(context, item), - relationshipService.findNextLeftPlaceByLeftItem(context, item)); - } - - @Test - public void testFindRightPlaceByRightItem() throws Exception { - // Declare objects utilized in unit test - Item item = mock(Item.class); - - // Mock lower level DAO to return mocked right place as 0 - when(relationshipDAO.findNextRightPlaceByRightItem(context, item)).thenReturn(0); - - // The right place reported from out mocked item should match the DAO's report of the right place - assertEquals("TestFindRightPlaceByRightItem 0", relationshipDAO.findNextRightPlaceByRightItem(context, item), - relationshipService.findNextRightPlaceByRightItem(context, item)); - } - @Test public void 
testFindByItemAndRelationshipType() throws Exception { // Declare objects utilized in unit test diff --git a/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplVersioningTest.java b/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplVersioningTest.java new file mode 100644 index 0000000000..d42213da2c --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplVersioningTest.java @@ -0,0 +1,1105 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.util.List; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EntityTypeBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.RelationshipBuilder; +import org.dspace.builder.RelationshipTypeBuilder; +import org.dspace.content.dao.RelationshipDAO; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.RelationshipService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.Before; +import org.junit.Test; + +public class RelationshipServiceImplVersioningTest extends AbstractIntegrationTestWithDatabase { + + private RelationshipService relationshipService; + private RelationshipDAO relationshipDAO; + + protected Community community; + protected Collection collection; + protected EntityType publicationEntityType; + protected EntityType personEntityType; + protected RelationshipType relationshipType; + protected Item publication1; + protected Item publication2; + protected Item publication3; + protected Item person1; + + @Override + @Before + public void 
setUp() throws Exception { + super.setUp(); + + relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); + relationshipDAO = DSpaceServicesFactory.getInstance().getServiceManager() + .getServicesByType(RelationshipDAO.class).get(0); + + context.turnOffAuthorisationSystem(); + + community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + publicationEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication") + .build(); + + personEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Person") + .build(); + + relationshipType = RelationshipTypeBuilder.createRelationshipTypeBuilder( + context, publicationEntityType, personEntityType, + "isAuthorOfPublication", "isPublicationOfAuthor", + null, null, null, null + ) + .withCopyToLeft(false) + .withCopyToRight(false) + .build(); + + publication1 = ItemBuilder.createItem(context, collection) + .withTitle("publication1") + .withMetadata("dspace", "entity", "type", publicationEntityType.getLabel()) + .build(); + + publication2 = ItemBuilder.createItem(context, collection) + .withTitle("publication2") + .withMetadata("dspace", "entity", "type", publicationEntityType.getLabel()) + .build(); + + publication3 = ItemBuilder.createItem(context, collection) + .withTitle("publication3") + .withMetadata("dspace", "entity", "type", publicationEntityType.getLabel()) + .build(); + + person1 = ItemBuilder.createItem(context, collection) + .withTitle("person1") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .build(); + + context.restoreAuthSystemState(); + } + + @Test + public void testRelationshipLatestVersionStatusDefault() throws Exception { + // create method #1 + context.turnOffAuthorisationSystem(); + Relationship relationship1 = relationshipService.create( + context, publication1, person1, relationshipType, 
3, 5, "left", "right" + ); + context.restoreAuthSystemState(); + assertEquals(Relationship.LatestVersionStatus.BOTH, relationship1.getLatestVersionStatus()); + Relationship relationship2 = relationshipService.find(context, relationship1.getID()); + assertEquals(Relationship.LatestVersionStatus.BOTH, relationship2.getLatestVersionStatus()); + + // create method #2 + context.turnOffAuthorisationSystem(); + Relationship relationship3 = relationshipService.create( + context, publication2, person1, relationshipType, 3, 5 + ); + context.restoreAuthSystemState(); + assertEquals(Relationship.LatestVersionStatus.BOTH, relationship3.getLatestVersionStatus()); + Relationship relationship4 = relationshipService.find(context, relationship3.getID()); + assertEquals(Relationship.LatestVersionStatus.BOTH, relationship4.getLatestVersionStatus()); + + // create method #3 + Relationship inputRelationship = new Relationship(); + inputRelationship.setLeftItem(publication3); + inputRelationship.setRightItem(person1); + inputRelationship.setRelationshipType(relationshipType); + context.turnOffAuthorisationSystem(); + Relationship relationship5 = relationshipService.create(context, inputRelationship); + context.restoreAuthSystemState(); + assertEquals(Relationship.LatestVersionStatus.BOTH, relationship5.getLatestVersionStatus()); + Relationship relationship6 = relationshipService.find(context, relationship5.getID()); + assertEquals(Relationship.LatestVersionStatus.BOTH, relationship6.getLatestVersionStatus()); + + // clean up + context.turnOffAuthorisationSystem(); + relationshipService.delete(context, relationship1); + relationshipService.delete(context, relationship3); + relationshipService.delete(context, relationship5); + context.restoreAuthSystemState(); + } + + @Test + public void testRelationshipLatestVersionStatusBoth() throws Exception { + // create method #1 + context.turnOffAuthorisationSystem(); + Relationship relationship1 = relationshipService.create( + context, 
publication1, person1, relationshipType, 3, 5, "left", "right", + Relationship.LatestVersionStatus.BOTH // set latest version status + ); + context.restoreAuthSystemState(); + assertEquals(Relationship.LatestVersionStatus.BOTH, relationship1.getLatestVersionStatus()); + Relationship relationship2 = relationshipService.find(context, relationship1.getID()); + assertEquals(Relationship.LatestVersionStatus.BOTH, relationship2.getLatestVersionStatus()); + + // create method #2 + Relationship inputRelationship = new Relationship(); + inputRelationship.setLeftItem(publication2); + inputRelationship.setRightItem(person1); + inputRelationship.setRelationshipType(relationshipType); + inputRelationship.setLatestVersionStatus(Relationship.LatestVersionStatus.BOTH); // set latest version status + context.turnOffAuthorisationSystem(); + Relationship relationship3 = relationshipService.create(context, inputRelationship); + context.restoreAuthSystemState(); + assertEquals(Relationship.LatestVersionStatus.BOTH, relationship3.getLatestVersionStatus()); + Relationship relationship4 = relationshipService.find(context, relationship3.getID()); + assertEquals(Relationship.LatestVersionStatus.BOTH, relationship4.getLatestVersionStatus()); + + // clean up + context.turnOffAuthorisationSystem(); + relationshipService.delete(context, relationship1); + relationshipService.delete(context, relationship3); + context.restoreAuthSystemState(); + } + + @Test + public void testRelationshipLatestVersionStatusLeftOnly() throws Exception { + // create method #1 + context.turnOffAuthorisationSystem(); + Relationship relationship1 = relationshipService.create( + context, publication1, person1, relationshipType, 3, 5, "left", "right", + Relationship.LatestVersionStatus.LEFT_ONLY // set latest version status + ); + context.restoreAuthSystemState(); + assertEquals(Relationship.LatestVersionStatus.LEFT_ONLY, relationship1.getLatestVersionStatus()); + Relationship relationship2 = 
relationshipService.find(context, relationship1.getID()); + assertEquals(Relationship.LatestVersionStatus.LEFT_ONLY, relationship2.getLatestVersionStatus()); + + // create method #2 + Relationship inputRelationship = new Relationship(); + inputRelationship.setLeftItem(publication2); + inputRelationship.setRightItem(person1); + inputRelationship.setRelationshipType(relationshipType); + inputRelationship.setLatestVersionStatus(Relationship.LatestVersionStatus.LEFT_ONLY); // set LVS + context.turnOffAuthorisationSystem(); + Relationship relationship3 = relationshipService.create(context, inputRelationship); + context.restoreAuthSystemState(); + assertEquals(Relationship.LatestVersionStatus.LEFT_ONLY, relationship3.getLatestVersionStatus()); + Relationship relationship4 = relationshipService.find(context, relationship3.getID()); + assertEquals(Relationship.LatestVersionStatus.LEFT_ONLY, relationship4.getLatestVersionStatus()); + + // clean up + context.turnOffAuthorisationSystem(); + relationshipService.delete(context, relationship1); + relationshipService.delete(context, relationship3); + context.restoreAuthSystemState(); + } + + @Test + public void testRelationshipLatestVersionStatusRightOnly() throws Exception { + // create method #1 + context.turnOffAuthorisationSystem(); + Relationship relationship1 = relationshipService.create( + context, publication1, person1, relationshipType, 3, 5, "left", "right", + Relationship.LatestVersionStatus.RIGHT_ONLY // set latest version status + ); + context.restoreAuthSystemState(); + assertEquals(Relationship.LatestVersionStatus.RIGHT_ONLY, relationship1.getLatestVersionStatus()); + Relationship relationship2 = relationshipService.find(context, relationship1.getID()); + assertEquals(Relationship.LatestVersionStatus.RIGHT_ONLY, relationship2.getLatestVersionStatus()); + + // create method #2 + Relationship inputRelationship = new Relationship(); + inputRelationship.setLeftItem(publication2); + 
inputRelationship.setRightItem(person1); + inputRelationship.setRelationshipType(relationshipType); + inputRelationship.setLatestVersionStatus(Relationship.LatestVersionStatus.RIGHT_ONLY); // set LVS + context.turnOffAuthorisationSystem(); + Relationship relationship3 = relationshipService.create(context, inputRelationship); + context.restoreAuthSystemState(); + assertEquals(Relationship.LatestVersionStatus.RIGHT_ONLY, relationship3.getLatestVersionStatus()); + Relationship relationship4 = relationshipService.find(context, relationship3.getID()); + assertEquals(Relationship.LatestVersionStatus.RIGHT_ONLY, relationship4.getLatestVersionStatus()); + + // clean up + context.turnOffAuthorisationSystem(); + relationshipService.delete(context, relationship1); + relationshipService.delete(context, relationship3); + context.restoreAuthSystemState(); + } + + protected void assertRelationship(Relationship expectedRelationship, List relationships) { + assertNotNull(relationships); + assertEquals(1, relationships.size()); + assertEquals(expectedRelationship, relationships.get(0)); + } + + protected void assertNoRelationship(List relationships) { + assertNotNull(relationships); + assertEquals(0, relationships.size()); + } + + @Test + public void testExcludeNonLatestBoth() throws Exception { + context.turnOffAuthorisationSystem(); + Relationship relationship1 = RelationshipBuilder + .createRelationshipBuilder(context, publication1, person1, relationshipType) + .withLatestVersionStatus(Relationship.LatestVersionStatus.BOTH) + .build(); + context.restoreAuthSystemState(); + + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, publication1, false, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, publication1, false, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, person1, false, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, person1, 
false, true) + ); + + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, publication1, -1, -1, false, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, publication1, -1, -1, false, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, person1, -1, -1, false, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, person1, -1, -1, false, true) + ); + + assertEquals(1, relationshipDAO.countByItem(context, publication1, false, false)); + assertEquals(1, relationshipDAO.countByItem(context, publication1, false, true)); + assertEquals(1, relationshipDAO.countByItem(context, person1, false, false)); + assertEquals(1, relationshipDAO.countByItem(context, person1, false, true)); + assertEquals(1, relationshipDAO.countByItem(context, publication1, true, false)); + assertEquals(1, relationshipDAO.countByItem(context, publication1, true, true)); + assertEquals(1, relationshipDAO.countByItem(context, person1, true, false)); + assertEquals(1, relationshipDAO.countByItem(context, person1, true, true)); + + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, true) + ); + + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, false) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, true) + ); + 
assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, true) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, false) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, true) + ); + + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, false, false) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, false, true) + ); + assertEquals( + 1, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, true, false) + ); + assertEquals( + 1, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, true, true) + ); + assertEquals( + 1, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, false, false) + ); + assertEquals( + 1, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, false, true) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, true, false) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, true, true) + ); + + assertRelationship( + relationship1, + relationshipService.findByItem(context, publication1) + ); + assertRelationship( + 
relationship1, + relationshipService.findByItem(context, person1) + ); + + assertRelationship( + relationship1, + relationshipService.findByItem(context, publication1, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItem(context, person1, -1, -1, false) + ); + + assertRelationship( + relationship1, + relationshipService.findByItem(context, publication1, -1, -1, false, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItem(context, publication1, -1, -1, false, true) + ); + assertRelationship( + relationship1, + relationshipService.findByItem(context, person1, -1, -1, false, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItem(context, person1, -1, -1, false, true) + ); + + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType) + ); + + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1) + ); + + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, true) + ); + + assertNoRelationship( + 
relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1) + ); + + assertNoRelationship( + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, false) + ); + assertNoRelationship( + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, true) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, false) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, true) + ); + + assertEquals(1, relationshipService.countByItem(context, publication1)); + assertEquals(1, relationshipService.countByItem(context, person1)); + + assertEquals(1, relationshipService.countByItem(context, publication1, false, false)); + assertEquals(1, 
relationshipService.countByItem(context, publication1, false, true)); + assertEquals(1, relationshipService.countByItem(context, person1, false, false)); + assertEquals(1, relationshipService.countByItem(context, person1, false, true)); + assertEquals(1, relationshipService.countByItem(context, publication1, true, false)); + assertEquals(1, relationshipService.countByItem(context, publication1, true, true)); + assertEquals(1, relationshipService.countByItem(context, person1, true, false)); + assertEquals(1, relationshipService.countByItem(context, person1, true, true)); + + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, false) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, true) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, true) + ); + + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, false, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, false, true) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, true, false) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, true, true) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, false, false) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, false, true) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, true, false) + ); + assertEquals( + 0, 
relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, true, true) + ); + } + + @Test + public void testExcludeNonLatestLeftOnly() throws Exception { + context.turnOffAuthorisationSystem(); + Relationship relationship1 = RelationshipBuilder + .createRelationshipBuilder(context, publication1, person1, relationshipType) + .withLatestVersionStatus(Relationship.LatestVersionStatus.LEFT_ONLY) + .build(); + context.restoreAuthSystemState(); + + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, publication1, false, false) + ); + assertNoRelationship( + relationshipDAO.findByItem(context, publication1, false, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, person1, false, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, person1, false, true) + ); + + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, publication1, -1, -1, false, false) + ); + assertNoRelationship( + relationshipDAO.findByItem(context, publication1, -1, -1, false, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, person1, -1, -1, false, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, person1, -1, -1, false, true) + ); + + assertEquals(1, relationshipDAO.countByItem(context, publication1, false, false)); + assertEquals(0, relationshipDAO.countByItem(context, publication1, false, true)); + assertEquals(1, relationshipDAO.countByItem(context, person1, false, false)); + assertEquals(1, relationshipDAO.countByItem(context, person1, false, true)); + assertEquals(1, relationshipDAO.countByItem(context, publication1, true, false)); + assertEquals(0, relationshipDAO.countByItem(context, publication1, true, true)); + assertEquals(1, relationshipDAO.countByItem(context, person1, true, false)); + assertEquals(1, relationshipDAO.countByItem(context, person1, true, true)); + + 
assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, false) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, true) + ); + + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, false) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, false) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, true) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, false) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, true) + ); + + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, false, false) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, false, true) + ); + assertEquals( + 1, 
relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, true, false) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, true, true) + ); + assertEquals( + 1, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, false, false) + ); + assertEquals( + 1, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, false, true) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, true, false) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, true, true) + ); + + assertNoRelationship( + relationshipService.findByItem(context, publication1) + ); + assertRelationship( + relationship1, + relationshipService.findByItem(context, person1) + ); + + assertNoRelationship( + relationshipService.findByItem(context, publication1, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItem(context, person1, -1, -1, false) + ); + + assertRelationship( + relationship1, + relationshipService.findByItem(context, publication1, -1, -1, false, false) + ); + assertNoRelationship( + relationshipService.findByItem(context, publication1, -1, -1, false, true) + ); + assertRelationship( + relationship1, + relationshipService.findByItem(context, person1, -1, -1, false, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItem(context, person1, -1, -1, false, true) + ); + + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType) + ); + + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1) + ); + assertRelationship( + 
relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1) + ); + + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, false) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, true) + ); + + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1) + ); + + assertNoRelationship( + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, false) + ); + assertNoRelationship( + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, false) + ); + assertNoRelationship( + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, 
false) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, true) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, false) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, true) + ); + + assertEquals(0, relationshipService.countByItem(context, publication1)); + assertEquals(1, relationshipService.countByItem(context, person1)); + + assertEquals(1, relationshipService.countByItem(context, publication1, false, false)); + assertEquals(0, relationshipService.countByItem(context, publication1, false, true)); + assertEquals(1, relationshipService.countByItem(context, person1, false, false)); + assertEquals(1, relationshipService.countByItem(context, person1, false, true)); + assertEquals(1, relationshipService.countByItem(context, publication1, true, false)); + assertEquals(0, relationshipService.countByItem(context, publication1, true, true)); + assertEquals(1, relationshipService.countByItem(context, person1, true, false)); + assertEquals(1, relationshipService.countByItem(context, person1, true, true)); + + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, true) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, true) + ); + + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, false, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, publication1, 
relationshipType, false, true) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, true, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, true, true) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, false, false) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, false, true) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, true, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, true, true) + ); + } + + @Test + public void testExcludeNonLatestRightOnly() throws Exception { + context.turnOffAuthorisationSystem(); + Relationship relationship1 = RelationshipBuilder + .createRelationshipBuilder(context, publication1, person1, relationshipType) + .withLatestVersionStatus(Relationship.LatestVersionStatus.RIGHT_ONLY) + .build(); + context.restoreAuthSystemState(); + + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, publication1, false, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, publication1, false, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, person1, false, false) + ); + assertNoRelationship( + relationshipDAO.findByItem(context, person1, false, true) + ); + + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, publication1, -1, -1, false, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, publication1, -1, -1, false, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, person1, -1, -1, false, false) + ); + assertNoRelationship( + relationshipDAO.findByItem(context, 
person1, -1, -1, false, true) + ); + + assertEquals(1, relationshipDAO.countByItem(context, publication1, false, false)); + assertEquals(1, relationshipDAO.countByItem(context, publication1, false, true)); + assertEquals(1, relationshipDAO.countByItem(context, person1, false, false)); + assertEquals(0, relationshipDAO.countByItem(context, person1, false, true)); + assertEquals(1, relationshipDAO.countByItem(context, publication1, true, false)); + assertEquals(1, relationshipDAO.countByItem(context, publication1, true, true)); + assertEquals(1, relationshipDAO.countByItem(context, person1, true, false)); + assertEquals(0, relationshipDAO.countByItem(context, person1, true, true)); + + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, false) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, true) + ); + + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, false) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, false) + ); + 
assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, true) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, false) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, true) + ); + + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, false, false) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, false, true) + ); + assertEquals( + 1, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, true, false) + ); + assertEquals( + 1, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, true, true) + ); + assertEquals( + 1, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, false, false) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, false, true) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, true, false) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, true, true) + ); + + assertRelationship( + relationship1, + relationshipService.findByItem(context, publication1) + ); + assertNoRelationship( + relationshipService.findByItem(context, person1) + ); + + assertRelationship( + relationship1, + relationshipService.findByItem(context, publication1, -1, -1, false) + ); + assertNoRelationship( + relationshipService.findByItem(context, person1, -1, -1, false) + ); + + assertRelationship( + relationship1, + relationshipService.findByItem(context, publication1, -1, -1, false, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItem(context, 
publication1, -1, -1, false, true) + ); + assertRelationship( + relationship1, + relationshipService.findByItem(context, person1, -1, -1, false, false) + ); + assertNoRelationship( + relationshipService.findByItem(context, person1, -1, -1, false, true) + ); + + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType) + ); + + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1) + ); + + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, false) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, true) + ); + + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1) + ); + + assertNoRelationship( + relationshipService + 
.findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, false) + ); + assertNoRelationship( + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, false) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, true) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, false) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, true) + ); + + assertEquals(1, relationshipService.countByItem(context, publication1)); + assertEquals(0, relationshipService.countByItem(context, person1)); + + assertEquals(1, relationshipService.countByItem(context, publication1, false, false)); + assertEquals(1, relationshipService.countByItem(context, publication1, false, true)); + assertEquals(1, relationshipService.countByItem(context, person1, false, false)); + assertEquals(0, relationshipService.countByItem(context, person1, false, true)); + assertEquals(1, relationshipService.countByItem(context, publication1, true, false)); + assertEquals(1, relationshipService.countByItem(context, publication1, true, true)); + assertEquals(1, relationshipService.countByItem(context, person1, true, false)); + assertEquals(0, relationshipService.countByItem(context, person1, true, true)); + + assertEquals( + 0, 
relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, false) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, true) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, true) + ); + + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, false, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, false, true) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, true, false) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, true, true) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, false, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, false, true) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, true, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, true, true) + ); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/content/RightTiltedRelationshipMetadataServiceIT.java b/dspace-api/src/test/java/org/dspace/content/RightTiltedRelationshipMetadataServiceIT.java index fc57f588db..c5359b23f0 100644 --- a/dspace-api/src/test/java/org/dspace/content/RightTiltedRelationshipMetadataServiceIT.java +++ b/dspace-api/src/test/java/org/dspace/content/RightTiltedRelationshipMetadataServiceIT.java @@ -8,6 +8,7 @@ package org.dspace.content; import static 
org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; import java.util.List; @@ -88,18 +89,27 @@ public class RightTiltedRelationshipMetadataServiceIT extends RelationshipMetada //request the virtual metadata of the journal volume List volumeRelList = relationshipMetadataService.getRelationshipMetadata(rightItem, true); - assertThat(volumeRelList.size(), equalTo(2)); - assertThat(volumeRelList.get(0).getValue(), equalTo("2")); - assertThat(volumeRelList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo("publicationissue")); - assertThat(volumeRelList.get(0).getMetadataField().getElement(), equalTo("issueNumber")); - assertThat(volumeRelList.get(0).getMetadataField().getQualifier(), equalTo(null)); + assertThat(volumeRelList.size(), equalTo(3)); + + assertThat(volumeRelList.get(0).getValue(), equalTo(String.valueOf(leftItem.getID()))); + assertThat(volumeRelList.get(0).getMetadataField().getMetadataSchema().getName(), + equalTo(MetadataSchemaEnum.RELATION.getName())); + assertThat(volumeRelList.get(0).getMetadataField().getElement(), equalTo("isIssueOfJournalVolume")); + assertThat(volumeRelList.get(0).getMetadataField().getQualifier(), equalTo("latestForDiscovery")); assertThat(volumeRelList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); - assertThat(volumeRelList.get(1).getValue(), equalTo(String.valueOf(leftItem.getID()))); - assertThat(volumeRelList.get(1).getMetadataField().getMetadataSchema().getName(), - equalTo(MetadataSchemaEnum.RELATION.getName())); - assertThat(volumeRelList.get(1).getMetadataField().getElement(), equalTo("isIssueOfJournalVolume")); + assertThat(volumeRelList.get(1).getValue(), equalTo("2")); + assertThat(volumeRelList.get(1).getMetadataField().getMetadataSchema().getName(), equalTo("publicationissue")); + assertThat(volumeRelList.get(1).getMetadataField().getElement(), equalTo("issueNumber")); + 
assertThat(volumeRelList.get(1).getMetadataField().getQualifier(), equalTo(null)); assertThat(volumeRelList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID())); + + assertThat(volumeRelList.get(2).getValue(), equalTo(String.valueOf(leftItem.getID()))); + assertThat(volumeRelList.get(2).getMetadataField().getMetadataSchema().getName(), + equalTo(MetadataSchemaEnum.RELATION.getName())); + assertThat(volumeRelList.get(2).getMetadataField().getElement(), equalTo("isIssueOfJournalVolume")); + assertThat(volumeRelList.get(2).getMetadataField().getQualifier(), nullValue()); + assertThat(volumeRelList.get(2).getAuthority(), equalTo("virtual::" + relationship.getID())); } } diff --git a/dspace-api/src/test/java/org/dspace/content/VersioningWithRelationshipsTest.java b/dspace-api/src/test/java/org/dspace/content/VersioningWithRelationshipsTest.java new file mode 100644 index 0000000000..528568c4e5 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/content/VersioningWithRelationshipsTest.java @@ -0,0 +1,4203 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content; + +import static org.dspace.content.Relationship.LatestVersionStatus.BOTH; +import static org.dspace.content.Relationship.LatestVersionStatus.LEFT_ONLY; +import static org.dspace.content.Relationship.LatestVersionStatus.RIGHT_ONLY; +import static org.dspace.util.RelationshipVersioningTestUtils.isRel; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.startsWith; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasProperty; +import static org.hamcrest.Matchers.instanceOf; +import 
static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNotSame; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; + +import org.apache.commons.lang3.function.FailableRunnable; +import org.apache.commons.lang3.function.FailableSupplier; +import org.apache.solr.client.solrj.SolrQuery; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.SolrDocument; +import org.apache.solr.common.SolrDocumentList; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EntityTypeBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.RelationshipBuilder; +import org.dspace.builder.RelationshipTypeBuilder; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.InstallItemService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.RelationshipService; +import org.dspace.content.service.WorkspaceItemService; +import org.dspace.content.virtual.Collected; +import org.dspace.content.virtual.VirtualMetadataConfiguration; +import org.dspace.content.virtual.VirtualMetadataPopulator; +import org.dspace.core.Constants; +import org.dspace.discovery.SolrSearchCore; +import org.dspace.kernel.ServiceManager; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.versioning.Version; +import org.dspace.versioning.factory.VersionServiceFactory; +import 
org.dspace.versioning.service.VersioningService; +import org.hamcrest.Matcher; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.springframework.beans.factory.config.AutowireCapableBeanFactory; + +public class VersioningWithRelationshipsTest extends AbstractIntegrationTestWithDatabase { + + private final RelationshipService relationshipService = + ContentServiceFactory.getInstance().getRelationshipService(); + private final VersioningService versioningService = + VersionServiceFactory.getInstance().getVersionService(); + private final WorkspaceItemService workspaceItemService = + ContentServiceFactory.getInstance().getWorkspaceItemService(); + private final InstallItemService installItemService = + ContentServiceFactory.getInstance().getInstallItemService(); + private final ItemService itemService = + ContentServiceFactory.getInstance().getItemService(); + private final SolrSearchCore solrSearchCore = + DSpaceServicesFactory.getInstance().getServiceManager().getServicesByType(SolrSearchCore.class).get(0); + + protected Community community; + protected Collection collection; + protected EntityType publicationEntityType; + protected EntityType personEntityType; + protected EntityType projectEntityType; + protected EntityType orgUnitEntityType; + protected EntityType journalIssueEntityType; + protected EntityType journalVolumeEntityType; + protected RelationshipType isAuthorOfPublication; + protected RelationshipType isProjectOfPublication; + protected RelationshipType isOrgUnitOfPublication; + protected RelationshipType isMemberOfProject; + protected RelationshipType isMemberOfOrgUnit; + protected RelationshipType isIssueOfJournalVolume; + protected RelationshipType isProjectOfPerson; + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + + context.turnOffAuthorisationSystem(); + + community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + collection = 
CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + publicationEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication") + .build(); + + personEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Person") + .build(); + + projectEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Project") + .build(); + + orgUnitEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "OrgUnit") + .build(); + + journalIssueEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "JournalIssue") + .build(); + + journalVolumeEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "JournalVolume") + .build(); + + isAuthorOfPublication = RelationshipTypeBuilder.createRelationshipTypeBuilder( + context, publicationEntityType, personEntityType, + "isAuthorOfPublication", "isPublicationOfAuthor", + null, null, null, null + ) + .withCopyToLeft(false) + .withCopyToRight(false) + .build(); + + isProjectOfPublication = RelationshipTypeBuilder.createRelationshipTypeBuilder( + context, publicationEntityType, projectEntityType, + "isProjectOfPublication", "isPublicationOfProject", + null, null, null, null + ) + .withCopyToLeft(false) + .withCopyToRight(false) + .build(); + + isOrgUnitOfPublication = RelationshipTypeBuilder.createRelationshipTypeBuilder( + context, publicationEntityType, orgUnitEntityType, + "isOrgUnitOfPublication", "isPublicationOfOrgUnit", + null, null, null, null + ) + .withCopyToLeft(false) + .withCopyToRight(false) + .build(); + + isMemberOfProject = RelationshipTypeBuilder.createRelationshipTypeBuilder( + context, projectEntityType, personEntityType, + "isMemberOfProject", "isProjectOfMember", + null, null, null, null + ) + .withCopyToLeft(false) + .withCopyToRight(false) + .build(); + + isMemberOfOrgUnit = RelationshipTypeBuilder.createRelationshipTypeBuilder( + context, orgUnitEntityType, personEntityType, + "isMemberOfOrgUnit", "isOrgUnitOfMember", + null, 
null, null, null + ) + .withCopyToLeft(false) + .withCopyToRight(false) + .build(); + + isIssueOfJournalVolume = RelationshipTypeBuilder.createRelationshipTypeBuilder( + context, journalVolumeEntityType, journalIssueEntityType, + "isIssueOfJournalVolume", "isJournalVolumeOfIssue", + null, null, 1, 1 + ) + .withCopyToLeft(false) + .withCopyToRight(false) + .build(); + + isProjectOfPerson = RelationshipTypeBuilder.createRelationshipTypeBuilder( + context, personEntityType, projectEntityType, + "isProjectOfPerson", "isPersonOfProject", + null, null, null, null + ) + .withCopyToLeft(false) + .withCopyToRight(false) + .build(); + } + + protected Relationship getRelationship( + Item leftItem, RelationshipType relationshipType, Item rightItem + ) throws Exception { + List rels = relationshipService.findByRelationshipType(context, relationshipType).stream() + .filter(rel -> leftItem.getID().equals(rel.getLeftItem().getID())) + .filter(rel -> rightItem.getID().equals(rel.getRightItem().getID())) + .collect(Collectors.toList()); + + if (rels.size() == 0) { + return null; + } + + if (rels.size() == 1) { + return rels.get(0); + } + + // NOTE: this shouldn't be possible because of database constraints + throw new IllegalStateException(); + } + + @Test + public void test_createNewVersionOfItemOnLeftSideOfRelationships() throws Exception { + /////////////////////////////////////////////// + // create a publication with 3 relationships // + /////////////////////////////////////////////// + + Item person1 = ItemBuilder.createItem(context, collection) + .withTitle("person 1") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .build(); + + Item project1 = ItemBuilder.createItem(context, collection) + .withTitle("project 1") + .withMetadata("dspace", "entity", "type", projectEntityType.getLabel()) + .build(); + + Item orgUnit1 = ItemBuilder.createItem(context, collection) + .withTitle("org unit 1") + .withMetadata("dspace", "entity", "type", 
orgUnitEntityType.getLabel()) + .build(); + + Item originalPublication = ItemBuilder.createItem(context, collection) + .withTitle("original publication") + .withMetadata("dspace", "entity", "type", publicationEntityType.getLabel()) + .build(); + + RelationshipBuilder.createRelationshipBuilder(context, originalPublication, person1, isAuthorOfPublication) + .build(); + + RelationshipBuilder.createRelationshipBuilder(context, originalPublication, project1, isProjectOfPublication) + .build(); + + RelationshipBuilder.createRelationshipBuilder(context, originalPublication, orgUnit1, isOrgUnitOfPublication) + .build(); + + ///////////////////////////////////////////////////////// + // verify that the relationships were properly created // + ///////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0), + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + ///////////////////////////////////////////// + // create a new version of the publication // + ///////////////////////////////////////////// + + Version newVersion = versioningService.createNewVersion(context, 
originalPublication); + Item newPublication = newVersion.getItem(); + assertNotSame(originalPublication, newPublication); + + /////////////////////////////////////////////////////////////////////// + // verify the relationships of all 5 items (excludeNonLatest = true) // + /////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0), + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPublication, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0), + isRel(newPublication, isProjectOfPublication, project1, RIGHT_ONLY, 0, 0), + isRel(newPublication, isOrgUnitOfPublication, orgUnit1, RIGHT_ONLY, 0, 0) + )) + ); + + //////////////////////////////////////////////////////////////////////// + // verify the relationships of all 5 items (excludeNonLatest = false) // + //////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, 
-1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0), + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(newPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0), + isRel(newPublication, isProjectOfPublication, project1, RIGHT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0), + isRel(newPublication, isOrgUnitOfPublication, orgUnit1, RIGHT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPublication, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0), + isRel(newPublication, isProjectOfPublication, project1, RIGHT_ONLY, 0, 0), + isRel(newPublication, isOrgUnitOfPublication, orgUnit1, RIGHT_ONLY, 0, 0) + )) + ); + + //////////////////////////////////////// + // do item install on new publication // + //////////////////////////////////////// + + WorkspaceItem newPublicationWSI = workspaceItemService.findByItem(context, newPublication); + installItemService.installItem(context, newPublicationWSI); + context.dispatchEvents(); + + /////////////////////////////////////////////////////////////////////// + // verify the relationships of all 5 items (excludeNonLatest = true) // + 
/////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0), + isRel(originalPublication, isProjectOfPublication, project1, RIGHT_ONLY, 0, 0), + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, RIGHT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(newPublication, isProjectOfPublication, project1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(newPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPublication, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(newPublication, isProjectOfPublication, project1, BOTH, 0, 0), + isRel(newPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + //////////////////////////////////////////////////////////////////////// + // verify the relationships of all 5 items (excludeNonLatest = false) // + //////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0), + isRel(originalPublication, isProjectOfPublication, project1, RIGHT_ONLY, 0, 0), + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, RIGHT_ONLY, 0, 
0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0), + isRel(newPublication, isAuthorOfPublication, person1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isProjectOfPublication, project1, RIGHT_ONLY, 0, 0), + isRel(newPublication, isProjectOfPublication, project1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, RIGHT_ONLY, 0, 0), + isRel(newPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPublication, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(newPublication, isProjectOfPublication, project1, BOTH, 0, 0), + isRel(newPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + ////////////// + // clean up // + ////////////// + + // need to manually delete all relationships to avoid SQL constraint violation exception + List relationships = relationshipService.findAll(context); + for (Relationship relationship : relationships) { + relationshipService.delete(context, relationship); + } + } + + @Test + public void test_createNewVersionOfItemAndModifyRelationships() throws Exception { + /////////////////////////////////////////////// + // create a publication with 3 relationships // + /////////////////////////////////////////////// + + Item person1 = ItemBuilder.createItem(context, collection) + .withTitle("person 1") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .build(); + + Item project1 = ItemBuilder.createItem(context, 
collection) + .withTitle("project 1") + .withMetadata("dspace", "entity", "type", projectEntityType.getLabel()) + .build(); + + Item orgUnit1 = ItemBuilder.createItem(context, collection) + .withTitle("org unit 1") + .withMetadata("dspace", "entity", "type", orgUnitEntityType.getLabel()) + .build(); + + Item originalPublication = ItemBuilder.createItem(context, collection) + .withTitle("original publication") + .withMetadata("dspace", "entity", "type", publicationEntityType.getLabel()) + .build(); + + RelationshipBuilder.createRelationshipBuilder(context, originalPublication, person1, isAuthorOfPublication) + .build(); + + RelationshipBuilder + .createRelationshipBuilder(context, originalPublication, project1, isProjectOfPublication) + .build(); + + RelationshipBuilder + .createRelationshipBuilder(context, originalPublication, orgUnit1, isOrgUnitOfPublication) + .build(); + + ///////////////////////////////////////////////////////// + // verify that the relationships were properly created // + ///////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0), + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, 
isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + ///////////////////////////////////////////// + // create a new version of the publication // + ///////////////////////////////////////////// + + Version newVersion = versioningService.createNewVersion(context, originalPublication); + Item newPublication = newVersion.getItem(); + assertNotSame(originalPublication, newPublication); + + ///////////////////////////////////////////// + // modify relationships on new publication // + ///////////////////////////////////////////// + + Item person2 = ItemBuilder.createItem(context, collection) + .withTitle("person 2") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .build(); + + Item orgUnit2 = ItemBuilder.createItem(context, collection) + .withTitle("org unit 2") + .withMetadata("dspace", "entity", "type", orgUnitEntityType.getLabel()) + .build(); + + // on new item, remove relationship with project 1 + List newProjectRels = relationshipService + .findByItemAndRelationshipType(context, newPublication, isProjectOfPublication); + assertEquals(1, newProjectRels.size()); + relationshipService.delete(context, newProjectRels.get(0)); + + // on new item remove relationship with org unit 1 + List newOrgUnitRels = relationshipService + .findByItemAndRelationshipType(context, newPublication, isOrgUnitOfPublication); + assertEquals(1, newOrgUnitRels.size()); + relationshipService.delete(context, newOrgUnitRels.get(0)); + + RelationshipBuilder.createRelationshipBuilder(context, newPublication, person2, isAuthorOfPublication) + .build(); + + RelationshipBuilder.createRelationshipBuilder(context, newPublication, orgUnit2, isOrgUnitOfPublication) + .build(); + + /////////////////////////////////////////////////////////////////////// + // verify the relationships of all 7 items (excludeNonLatest = true) // + /////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, 
originalPublication, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0), + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person2, -1, -1, false, true), + containsInAnyOrder(List.of( + // NOTE: BOTH because new relationship + isRel(newPublication, isAuthorOfPublication, person2, BOTH, 1, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit2, -1, -1, false, true), + containsInAnyOrder(List.of( + // NOTE: BOTH because new relationship + isRel(newPublication, isOrgUnitOfPublication, orgUnit2, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPublication, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0), + // NOTE: BOTH because new relationship + isRel(newPublication, isAuthorOfPublication, person2, BOTH, 1, 0), + isRel(newPublication, isOrgUnitOfPublication, orgUnit2, BOTH, 0, 0) + )) + ); + + //////////////////////////////////////////////////////////////////////// + // verify the relationships of all 7 items (excludeNonLatest = false) // + 
//////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0), + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(newPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person2, -1, -1, false, false), + containsInAnyOrder(List.of( + // NOTE: BOTH because new relationship + isRel(newPublication, isAuthorOfPublication, person2, BOTH, 1, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit2, -1, -1, false, false), + containsInAnyOrder(List.of( + // NOTE: BOTH because new relationship + isRel(newPublication, isOrgUnitOfPublication, orgUnit2, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPublication, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0), + // NOTE: BOTH because new relationship + isRel(newPublication, isAuthorOfPublication, person2, BOTH, 1, 0), + isRel(newPublication, isOrgUnitOfPublication, orgUnit2, BOTH, 0, 0) + )) + 
); + + //////////////////////////////////////// + // do item install on new publication // + //////////////////////////////////////// + + WorkspaceItem newPublicationWSI = workspaceItemService.findByItem(context, newPublication); + installItemService.installItem(context, newPublicationWSI); + context.dispatchEvents(); + + /////////////////////////////////////////////////////////////////////// + // verify the relationships of all 7 items (excludeNonLatest = true) // + /////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0), + isRel(originalPublication, isProjectOfPublication, project1, RIGHT_ONLY, 0, 0), + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, RIGHT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person2, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person2, BOTH, 1, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, true), + empty() + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, true), + empty() + ); + + assertThat( + relationshipService.findByItem(context, orgUnit2, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(newPublication, isOrgUnitOfPublication, orgUnit2, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPublication, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(newPublication, isAuthorOfPublication, person2, BOTH, 1, 0), + 
isRel(newPublication, isOrgUnitOfPublication, orgUnit2, BOTH, 0, 0) + )) + ); + + //////////////////////////////////////////////////////////////////////// + // verify the relationships of all 7 items (excludeNonLatest = false) // + //////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0), + isRel(originalPublication, isProjectOfPublication, project1, RIGHT_ONLY, 0, 0), + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, RIGHT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0), + isRel(newPublication, isAuthorOfPublication, person1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person2, BOTH, 1, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isProjectOfPublication, project1, RIGHT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, RIGHT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(newPublication, isOrgUnitOfPublication, orgUnit2, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPublication, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + 
isRel(newPublication, isAuthorOfPublication, person2, BOTH, 1, 0), + isRel(newPublication, isOrgUnitOfPublication, orgUnit2, BOTH, 0, 0) + )) + ); + + ////////////// + // clean up // + ////////////// + + // need to manually delete all relationships to avoid SQL constraint violation exception + List relationships = relationshipService.findAll(context); + for (Relationship relationship : relationships) { + relationshipService.delete(context, relationship); + } + } + + @Test + public void test_createNewVersionOfItemOnRightSideOfRelationships() throws Exception { + ////////////////////////////////////////// + // create a person with 3 relationships // + ////////////////////////////////////////// + + Item publication1 = ItemBuilder.createItem(context, collection) + .withTitle("publication 1") + .withMetadata("dspace", "entity", "type", publicationEntityType.getLabel()) + .build(); + + Item project1 = ItemBuilder.createItem(context, collection) + .withTitle("project 1") + .withMetadata("dspace", "entity", "type", projectEntityType.getLabel()) + .build(); + + Item orgUnit1 = ItemBuilder.createItem(context, collection) + .withTitle("org unit 1") + .withMetadata("dspace", "entity", "type", orgUnitEntityType.getLabel()) + .build(); + + Item originalPerson = ItemBuilder.createItem(context, collection) + .withTitle("original person") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .build(); + + RelationshipBuilder.createRelationshipBuilder(context, publication1, originalPerson, isAuthorOfPublication) + .build(); + + RelationshipBuilder.createRelationshipBuilder(context, project1, originalPerson, isMemberOfProject) + .build(); + + RelationshipBuilder.createRelationshipBuilder(context, orgUnit1, originalPerson, isMemberOfOrgUnit) + .build(); + + ///////////////////////////////////////////////////////// + // verify that the relationships were properly created // + ///////////////////////////////////////////////////////// + + assertThat( + 
relationshipService.findByItem(context, originalPerson, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, originalPerson, BOTH, 0, 0), + isRel(project1, isMemberOfProject, originalPerson, BOTH, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, originalPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, publication1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, originalPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(project1, isMemberOfProject, originalPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(orgUnit1, isMemberOfOrgUnit, originalPerson, BOTH, 0, 0) + )) + ); + + //////////////////////////////////////// + // create a new version of the person // + //////////////////////////////////////// + + Version newVersion = versioningService.createNewVersion(context, originalPerson); + Item newPerson = newVersion.getItem(); + assertNotSame(originalPerson, newPerson); + + /////////////////////////////////////////////////////////////////////// + // verify the relationships of all 5 items (excludeNonLatest = true) // + /////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPerson, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, originalPerson, BOTH, 0, 0), + isRel(project1, isMemberOfProject, originalPerson, BOTH, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, originalPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, publication1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, originalPerson, BOTH, 0, 0) + )) + ); + 
+ assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(project1, isMemberOfProject, originalPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(orgUnit1, isMemberOfOrgUnit, originalPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPerson, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, newPerson, LEFT_ONLY, 0, 0), + isRel(project1, isMemberOfProject, newPerson, LEFT_ONLY, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, newPerson, LEFT_ONLY, 0, 0) + )) + ); + + //////////////////////////////////////////////////////////////////////// + // verify the relationships of all 5 items (excludeNonLatest = false) // + //////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPerson, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, originalPerson, BOTH, 0, 0), + isRel(project1, isMemberOfProject, originalPerson, BOTH, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, originalPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, publication1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, originalPerson, BOTH, 0, 0), + isRel(publication1, isAuthorOfPublication, newPerson, LEFT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(project1, isMemberOfProject, originalPerson, BOTH, 0, 0), + isRel(project1, isMemberOfProject, newPerson, LEFT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(orgUnit1, isMemberOfOrgUnit, 
originalPerson, BOTH, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, newPerson, LEFT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPerson, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, newPerson, LEFT_ONLY, 0, 0), + isRel(project1, isMemberOfProject, newPerson, LEFT_ONLY, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, newPerson, LEFT_ONLY, 0, 0) + )) + ); + + /////////////////////////////////// + // do item install on new person // + /////////////////////////////////// + + WorkspaceItem newPersonWSI = workspaceItemService.findByItem(context, newPerson); + installItemService.installItem(context, newPersonWSI); + context.dispatchEvents(); + + /////////////////////////////////////////////////////////////////////// + // verify the relationships of all 5 items (excludeNonLatest = true) // + /////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPerson, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, originalPerson, LEFT_ONLY, 0, 0), + isRel(project1, isMemberOfProject, originalPerson, LEFT_ONLY, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, originalPerson, LEFT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, publication1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, newPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(project1, isMemberOfProject, newPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(orgUnit1, isMemberOfOrgUnit, newPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPerson, -1, -1, false, true), + 
containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, newPerson, BOTH, 0, 0), + isRel(project1, isMemberOfProject, newPerson, BOTH, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, newPerson, BOTH, 0, 0) + )) + ); + + //////////////////////////////////////////////////////////////////////// + // verify the relationships of all 5 items (excludeNonLatest = false) // + //////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPerson, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, originalPerson, LEFT_ONLY, 0, 0), + isRel(project1, isMemberOfProject, originalPerson, LEFT_ONLY, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, originalPerson, LEFT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, publication1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, originalPerson, LEFT_ONLY, 0, 0), + isRel(publication1, isAuthorOfPublication, newPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(project1, isMemberOfProject, originalPerson, LEFT_ONLY, 0, 0), + isRel(project1, isMemberOfProject, newPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(orgUnit1, isMemberOfOrgUnit, originalPerson, LEFT_ONLY, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, newPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPerson, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, newPerson, BOTH, 0, 0), + isRel(project1, isMemberOfProject, newPerson, BOTH, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, newPerson, BOTH, 0, 0) + )) + ); + + ////////////// + // clean up // + ////////////// + + // 
need to manually delete all relationships to avoid SQL constraint violation exception + List relationships = relationshipService.findAll(context); + for (Relationship relationship : relationships) { + relationshipService.delete(context, relationship); + } + } + + @Test + public void test_createNewVersionOfItemAndVerifyMetadataOrder() throws Exception { + ///////////////////////////////////////// + // create a publication with 6 authors // + ///////////////////////////////////////// + + Item originalPublication = ItemBuilder.createItem(context, collection) + .withTitle("original publication") + .withMetadata("dspace", "entity", "type", publicationEntityType.getLabel()) + .build(); + + // author 1 (plain metadata) + itemService.addMetadata(context, originalPublication, "dc", "contributor", "author", null, "author 1 (plain)"); + + // author 2 (virtual) + Item author2 = ItemBuilder.createItem(context, collection) + .withTitle("author 2 (item)") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .withPersonIdentifierFirstName("2 (item)") + .withPersonIdentifierLastName("author") + .build(); + RelationshipBuilder.createRelationshipBuilder(context, originalPublication, author2, isAuthorOfPublication) + .build(); + + // author 3 (virtual) + Item author3 = ItemBuilder.createItem(context, collection) + .withTitle("author 3 (item)") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .withPersonIdentifierFirstName("3 (item)") + .withPersonIdentifierLastName("author") + .build(); + RelationshipBuilder.createRelationshipBuilder(context, originalPublication, author3, isAuthorOfPublication) + .build(); + + // author 4 (virtual) + Item author4 = ItemBuilder.createItem(context, collection) + .withTitle("author 4 (item)") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .withPersonIdentifierFirstName("4 (item)") + .withPersonIdentifierLastName("author") + .build(); + 
RelationshipBuilder.createRelationshipBuilder(context, originalPublication, author4, isAuthorOfPublication) + .build(); + + // author 5 (virtual) + Item author5 = ItemBuilder.createItem(context, collection) + .withTitle("author 5 (item)") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .withPersonIdentifierFirstName("5 (item)") + .withPersonIdentifierLastName("author") + .build(); + RelationshipBuilder.createRelationshipBuilder(context, originalPublication, author5, isAuthorOfPublication) + .build(); + + // author 6 (plain metadata) + itemService.addMetadata(context, originalPublication, "dc", "contributor", "author", null, "author 6 (plain)"); + + // author 7 (virtual) + Item author7 = ItemBuilder.createItem(context, collection) + .withTitle("author 7 (item)") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .withPersonIdentifierFirstName("7 (item)") + .withPersonIdentifierLastName("author") + .build(); + RelationshipBuilder.createRelationshipBuilder(context, originalPublication, author7, isAuthorOfPublication) + .build(); + + // author 8 (plain metadata) + itemService.addMetadata(context, originalPublication, "dc", "contributor", "author", null, "author 8 (plain)"); + + // author 9 (virtual) + Item author9 = ItemBuilder.createItem(context, collection) + .withTitle("author 9 (item)") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .withPersonIdentifierFirstName("9 (item)") + .withPersonIdentifierLastName("author") + .build(); + RelationshipBuilder.createRelationshipBuilder(context, originalPublication, author9, isAuthorOfPublication) + .build(); + + //////////////////////////////// + // test dc.contributor.author // + //////////////////////////////// + + List oldMdvs = itemService.getMetadata( + originalPublication, "dc", "contributor", "author", Item.ANY + ); + assertEquals(9, oldMdvs.size()); + + assertFalse(oldMdvs.get(0) instanceof RelationshipMetadataValue); + assertEquals("author 
1 (plain)", oldMdvs.get(0).getValue()); + assertEquals(0, oldMdvs.get(0).getPlace()); + + assertTrue(oldMdvs.get(1) instanceof RelationshipMetadataValue); + assertEquals("author, 2 (item)", oldMdvs.get(1).getValue()); + assertEquals(1, oldMdvs.get(1).getPlace()); + + assertTrue(oldMdvs.get(2) instanceof RelationshipMetadataValue); + assertEquals("author, 3 (item)", oldMdvs.get(2).getValue()); + assertEquals(2, oldMdvs.get(2).getPlace()); + + assertTrue(oldMdvs.get(3) instanceof RelationshipMetadataValue); + assertEquals("author, 4 (item)", oldMdvs.get(3).getValue()); + assertEquals(3, oldMdvs.get(3).getPlace()); + + assertTrue(oldMdvs.get(4) instanceof RelationshipMetadataValue); + assertEquals("author, 5 (item)", oldMdvs.get(4).getValue()); + assertEquals(4, oldMdvs.get(4).getPlace()); + + assertFalse(oldMdvs.get(5) instanceof RelationshipMetadataValue); + assertEquals("author 6 (plain)", oldMdvs.get(5).getValue()); + assertEquals(5, oldMdvs.get(5).getPlace()); + + assertTrue(oldMdvs.get(6) instanceof RelationshipMetadataValue); + assertEquals("author, 7 (item)", oldMdvs.get(6).getValue()); + assertEquals(6, oldMdvs.get(6).getPlace()); + + assertFalse(oldMdvs.get(7) instanceof RelationshipMetadataValue); + assertEquals("author 8 (plain)", oldMdvs.get(7).getValue()); + assertEquals(7, oldMdvs.get(7).getPlace()); + + assertTrue(oldMdvs.get(8) instanceof RelationshipMetadataValue); + assertEquals("author, 9 (item)", oldMdvs.get(8).getValue()); + assertEquals(8, oldMdvs.get(8).getPlace()); + + ///////////////////////////////////////////// + // test relationship isAuthorOfPublication // + ///////////////////////////////////////////// + + List oldRelationships = relationshipService.findByItem(context, originalPublication); + assertEquals(6, oldRelationships.size()); + + assertEquals(originalPublication, oldRelationships.get(0).getLeftItem()); + assertEquals(isAuthorOfPublication, oldRelationships.get(0).getRelationshipType()); + assertEquals(author2, 
oldRelationships.get(0).getRightItem()); + assertEquals(1, oldRelationships.get(0).getLeftPlace()); + assertEquals(0, oldRelationships.get(0).getRightPlace()); + + assertEquals(originalPublication, oldRelationships.get(1).getLeftItem()); + assertEquals(isAuthorOfPublication, oldRelationships.get(1).getRelationshipType()); + assertEquals(author3, oldRelationships.get(1).getRightItem()); + assertEquals(2, oldRelationships.get(1).getLeftPlace()); + assertEquals(0, oldRelationships.get(1).getRightPlace()); + + assertEquals(originalPublication, oldRelationships.get(2).getLeftItem()); + assertEquals(isAuthorOfPublication, oldRelationships.get(2).getRelationshipType()); + assertEquals(author4, oldRelationships.get(2).getRightItem()); + assertEquals(3, oldRelationships.get(2).getLeftPlace()); + assertEquals(0, oldRelationships.get(2).getRightPlace()); + + assertEquals(originalPublication, oldRelationships.get(3).getLeftItem()); + assertEquals(isAuthorOfPublication, oldRelationships.get(3).getRelationshipType()); + assertEquals(author5, oldRelationships.get(3).getRightItem()); + assertEquals(4, oldRelationships.get(3).getLeftPlace()); + assertEquals(0, oldRelationships.get(3).getRightPlace()); + + assertEquals(originalPublication, oldRelationships.get(4).getLeftItem()); + assertEquals(isAuthorOfPublication, oldRelationships.get(4).getRelationshipType()); + assertEquals(author7, oldRelationships.get(4).getRightItem()); + assertEquals(6, oldRelationships.get(4).getLeftPlace()); + assertEquals(0, oldRelationships.get(4).getRightPlace()); + + assertEquals(originalPublication, oldRelationships.get(5).getLeftItem()); + assertEquals(isAuthorOfPublication, oldRelationships.get(5).getRelationshipType()); + assertEquals(author9, oldRelationships.get(5).getRightItem()); + assertEquals(8, oldRelationships.get(5).getLeftPlace()); + assertEquals(0, oldRelationships.get(5).getRightPlace()); + + /////////////////////////////////////// + // create new version of publication // + 
/////////////////////////////////////// + + Version newVersion = versioningService.createNewVersion(context, originalPublication); + Item newPublication = newVersion.getItem(); + assertNotSame(originalPublication, newPublication); + + //////////////////////////////// + // test dc.contributor.author // + //////////////////////////////// + + List newMdvs = itemService.getMetadata( + newPublication, "dc", "contributor", "author", Item.ANY + ); + assertEquals(9, newMdvs.size()); + + assertFalse(newMdvs.get(0) instanceof RelationshipMetadataValue); + assertEquals("author 1 (plain)", newMdvs.get(0).getValue()); + assertEquals(0, newMdvs.get(0).getPlace()); + + assertTrue(newMdvs.get(1) instanceof RelationshipMetadataValue); + assertEquals("author, 2 (item)", newMdvs.get(1).getValue()); + assertEquals(1, newMdvs.get(1).getPlace()); + + assertTrue(newMdvs.get(2) instanceof RelationshipMetadataValue); + assertEquals("author, 3 (item)", newMdvs.get(2).getValue()); + assertEquals(2, newMdvs.get(2).getPlace()); + + assertTrue(newMdvs.get(3) instanceof RelationshipMetadataValue); + assertEquals("author, 4 (item)", newMdvs.get(3).getValue()); + assertEquals(3, newMdvs.get(3).getPlace()); + + assertTrue(newMdvs.get(4) instanceof RelationshipMetadataValue); + assertEquals("author, 5 (item)", newMdvs.get(4).getValue()); + assertEquals(4, newMdvs.get(4).getPlace()); + + assertFalse(newMdvs.get(5) instanceof RelationshipMetadataValue); + assertEquals("author 6 (plain)", newMdvs.get(5).getValue()); + assertEquals(5, newMdvs.get(5).getPlace()); + + assertTrue(newMdvs.get(6) instanceof RelationshipMetadataValue); + assertEquals("author, 7 (item)", newMdvs.get(6).getValue()); + assertEquals(6, newMdvs.get(6).getPlace()); + + assertFalse(newMdvs.get(7) instanceof RelationshipMetadataValue); + assertEquals("author 8 (plain)", newMdvs.get(7).getValue()); + assertEquals(7, newMdvs.get(7).getPlace()); + + assertTrue(newMdvs.get(8) instanceof RelationshipMetadataValue); + assertEquals("author, 
9 (item)", newMdvs.get(8).getValue()); + assertEquals(8, newMdvs.get(8).getPlace()); + + ///////////////////////////////////////////// + // test relationship isAuthorOfPublication // + ///////////////////////////////////////////// + + List newRelationships = relationshipService.findByItem(context, newPublication); + assertEquals(6, newRelationships.size()); + + assertEquals(newPublication, newRelationships.get(0).getLeftItem()); + assertEquals(isAuthorOfPublication, newRelationships.get(0).getRelationshipType()); + assertEquals(author2, newRelationships.get(0).getRightItem()); + assertEquals(1, newRelationships.get(0).getLeftPlace()); + assertEquals(0, newRelationships.get(0).getRightPlace()); + + assertEquals(newPublication, newRelationships.get(1).getLeftItem()); + assertEquals(isAuthorOfPublication, newRelationships.get(1).getRelationshipType()); + assertEquals(author3, newRelationships.get(1).getRightItem()); + assertEquals(2, newRelationships.get(1).getLeftPlace()); + assertEquals(0, newRelationships.get(1).getRightPlace()); + + assertEquals(newPublication, newRelationships.get(2).getLeftItem()); + assertEquals(isAuthorOfPublication, newRelationships.get(2).getRelationshipType()); + assertEquals(author4, newRelationships.get(2).getRightItem()); + assertEquals(3, newRelationships.get(2).getLeftPlace()); + assertEquals(0, newRelationships.get(2).getRightPlace()); + + assertEquals(newPublication, newRelationships.get(3).getLeftItem()); + assertEquals(isAuthorOfPublication, newRelationships.get(3).getRelationshipType()); + assertEquals(author5, newRelationships.get(3).getRightItem()); + assertEquals(4, newRelationships.get(3).getLeftPlace()); + assertEquals(0, newRelationships.get(3).getRightPlace()); + + assertEquals(newPublication, newRelationships.get(4).getLeftItem()); + assertEquals(isAuthorOfPublication, newRelationships.get(4).getRelationshipType()); + assertEquals(author7, newRelationships.get(4).getRightItem()); + assertEquals(6, 
newRelationships.get(4).getLeftPlace()); + assertEquals(0, newRelationships.get(4).getRightPlace()); + + assertEquals(newPublication, newRelationships.get(5).getLeftItem()); + assertEquals(isAuthorOfPublication, newRelationships.get(5).getRelationshipType()); + assertEquals(author9, newRelationships.get(5).getRightItem()); + assertEquals(8, newRelationships.get(5).getLeftPlace()); + assertEquals(0, newRelationships.get(5).getRightPlace()); + + ////////////// + // clean up // + ////////////// + + // need to manually delete all relationships to avoid SQL constraint violation exception + List relationships = relationshipService.findAll(context); + for (Relationship relationship : relationships) { + relationshipService.delete(context, relationship); + } + } + + /** + * This test will + * - create a publication with 10 projects + * - Remove, move, add projects + * - Verify the order remains correct + * @throws Exception + */ + @Test + public void test_createNewVersionOfItemWithAddRemoveMove() throws Exception { + /////////////////////////////////////////// + // create a publication with 10 projects // + /////////////////////////////////////////// + + Item originalPublication = ItemBuilder.createItem(context, collection) + .withTitle("original publication") + .withMetadata("dspace", "entity", "type", publicationEntityType.getLabel()) + .build(); + + List projects = new ArrayList<>(); + + for (int i = 0; i < 10; i++) { + Item project = ItemBuilder.createItem(context, collection) + .withTitle("project " + i) + .withMetadata("dspace", "entity", "type", projectEntityType.getLabel()) + .build(); + projects.add(project); + + RelationshipBuilder + .createRelationshipBuilder(context, originalPublication, project, isProjectOfPublication) + .build(); + } + + AtomicInteger counterOriginalPublication = new AtomicInteger(); + List> listOriginalPublication = projects.stream().map( + project -> isRel(originalPublication, isProjectOfPublication, project, BOTH, + 
counterOriginalPublication.getAndIncrement(), 0) + ).collect(Collectors.toCollection(ArrayList::new)); + + ///////////////////////////////////////////////////////////////////// + // verify the relationships of all items (excludeNonLatest = true) // + ///////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, true), + containsInAnyOrder(listOriginalPublication) + ); + + ////////////////////////////////////////////////////////////////////// + // verify the relationships of all items (excludeNonLatest = false) // + ////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, false), + containsInAnyOrder(listOriginalPublication) + ); + + ///////////////////////////////////////////// + // create a new version of the publication // + ///////////////////////////////////////////// + + Version newVersion = versioningService.createNewVersion(context, originalPublication); + Item newPublication = newVersion.getItem(); + assertNotSame(originalPublication, newPublication); + + verifyProjectsMatch(originalPublication, projects, newPublication, projects, false);// + + ///////////////////////////////////////////// + // modify relationships on new publication // + ///////////////////////////////////////////// + + List newProjects = new ArrayList<>(projects); + assertEquals(newProjects.size(), 10); + + removeProject(newPublication, 5, newProjects); + + assertEquals(projects.size(), 10); + assertEquals(newProjects.size(), 9); + verifyProjectsMatch(originalPublication, projects, newPublication, newProjects, false); + + Item project6 = newProjects.get(6); + moveProject(newPublication, 6, 2, newProjects); + assertEquals(newProjects.size(), 9); + assertEquals(newProjects.get(2), project6); + assertNotEquals(projects.get(2), project6); + verifyProjectsMatch(originalPublication, 
projects, newPublication, newProjects, false); + + Item project1 = newProjects.get(1); + moveProject(newPublication, 1, 5, newProjects); + assertEquals(newProjects.size(), 9); + assertEquals(newProjects.get(5), project1); + assertNotEquals(projects.get(5), project1); + verifyProjectsMatch(originalPublication, projects, newPublication, newProjects, false); + + Item project = ItemBuilder.createItem(context, collection) + .withTitle("project 10") + .withMetadata("dspace", "entity", "type", projectEntityType.getLabel()) + .build(); + newProjects.add(4, project); + + RelationshipBuilder + .createRelationshipBuilder(context, newPublication, project, isProjectOfPublication, 4, -1) + .build(); + + verifyProjectsMatch(originalPublication, projects, newPublication, newProjects, false); + + //////////////////////////////////////// + // do item install on new publication // + //////////////////////////////////////// + + WorkspaceItem newPublicationWSI = workspaceItemService.findByItem(context, newPublication); + installItemService.installItem(context, newPublicationWSI); + context.dispatchEvents(); + + verifyProjectsMatch(originalPublication, projects, newPublication, newProjects, true); + + ////////////// + // clean up // + ////////////// + + // need to manually delete all relationships to avoid SQL constraint violation exception + List relationships = relationshipService.findAll(context); + for (Relationship relationship : relationships) { + relationshipService.delete(context, relationship); + } + } + + protected void removeProject(Item newPublication, int place, List newProjects) + throws SQLException, AuthorizeException { + List projectRels = relationshipService + .findByItemAndRelationshipType(context, newProjects.get(place), isProjectOfPublication, -1, -1, false) + .stream() + .filter( + relationship -> relationship.getLeftItem().equals(newPublication) + ) + .collect(Collectors.toCollection(ArrayList::new)); + assertEquals(1, projectRels.size()); + 
relationshipService.delete(context, projectRels.get(0)); + newProjects.remove(newProjects.get(place)); + } + + protected void moveProject(Item newPublication, int oldPlace, int newPlace, List newProjects) + throws SQLException, AuthorizeException { + Item project = newProjects.get(oldPlace); + List projectRels = relationshipService + .findByItemAndRelationshipType(context, project, isProjectOfPublication, -1, -1, false) + .stream() + .filter( + relationship -> relationship.getLeftItem().equals(newPublication) + ) + .collect(Collectors.toCollection(ArrayList::new)); + assertEquals(1, projectRels.size()); + relationshipService.move(context, projectRels.get(0), newPlace, null); + newProjects.remove(project); + newProjects.add(newPlace, project); + } + + protected void verifyProjectsMatch(Item originalPublication, List originalProjects, + Item newPublication, List newProjects, boolean newPublicationArchived) + throws SQLException { + + ///////////////////////////////////////////////////////// + // verify that the relationships were properly created // + ///////////////////////////////////////////////////////// + + AtomicInteger counterOriginalPublication = new AtomicInteger(); + List> listOriginalPublication = originalProjects.stream().map( + project -> isRel(originalPublication, isProjectOfPublication, project, + newPublicationArchived ? RIGHT_ONLY : BOTH, + counterOriginalPublication.getAndIncrement(), 0) + ).collect(Collectors.toCollection(ArrayList::new)); + + AtomicInteger counterNewPublication = new AtomicInteger(); + List> listNewPublication = newProjects.stream().map( + project -> isRel(newPublication, isProjectOfPublication, project, + newPublicationArchived || !originalProjects.contains(project) ? 
+ BOTH : RIGHT_ONLY, + counterNewPublication.getAndIncrement(), 0) + ).collect(Collectors.toCollection(ArrayList::new)); + + ///////////////////////////////////////////////////////////////////// + // verify the relationships of all items (excludeNonLatest = true) // + ///////////////////////////////////////////////////////////////////// + + assertEquals( + relationshipService.countByItem(context, originalPublication, false, true), + originalProjects.size() + ); + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, true), + containsInAnyOrder(listOriginalPublication) + ); + + assertEquals( + relationshipService.countByItem(context, newPublication, false, true), + newProjects.size() + ); + + assertThat( + relationshipService.findByItem(context, newPublication, -1, -1, false, true), + containsInAnyOrder(listNewPublication) + ); + + ////////////////////////////////////////////////////////////////////// + // verify the relationships of all items (excludeNonLatest = false) // + ////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, false), + containsInAnyOrder(listOriginalPublication) + ); + + assertThat( + relationshipService.findByItem(context, newPublication, -1, -1, false, false), + containsInAnyOrder(listNewPublication) + ); + } + + /** + * NOTE: If Spring bean classes would be created with the new keyword, nothing would be autowired. 
+ */ + protected T createBean(Class beanClass) throws Exception { + AutowireCapableBeanFactory factory = DSpaceServicesFactory.getInstance().getServiceManager() + .getApplicationContext().getAutowireCapableBeanFactory(); + + T bean = beanClass.getDeclaredConstructor().newInstance(); + + factory.autowireBean(bean); + + return bean; + } + + /** + * Run the given callback with a virtual metadata config that's different from virtual-metadata.xml, + * and clean up after the callback has terminated. + * @param configModifier lambda that generates the temporary virtual metadata config. + * @param callback the callback that will be executed with the temporary virtual metadata config. + */ + protected void runWithVirtualMetadataConfig( + FailableSupplier>, Exception> configModifier, + FailableRunnable callback + ) throws Exception { + VirtualMetadataPopulator virtualMetadataPopulator = DSpaceServicesFactory.getInstance() + .getServiceManager().getServicesByType(VirtualMetadataPopulator.class).get(0); + + // keep reference to old config + Map> oldConfig = virtualMetadataPopulator.getMap(); + + try { + // set new config + Map> newConfig = configModifier.get(); + virtualMetadataPopulator.setMap(newConfig); + + // run the callback + callback.run(); + } finally { + // reset handlers + virtualMetadataPopulator.setMap(oldConfig); + } + } + + @Test + public void test_placeRecalculationAfterDelete() throws Exception { + // NOTE: this test uses relationship isIssueOfJournalVolume, because it adds virtual metadata + // on both sides of the relationship + + ///////////////////////////////////////// + // properly configure virtual metadata // + ///////////////////////////////////////// + + ServiceManager serviceManager = DSpaceServicesFactory.getInstance().getServiceManager(); + + // virtual metadata field publicationissue.issueNumber needs to be used in place calculations + Collected issueVmd = serviceManager.getServiceByName("journalIssue_number", Collected.class); + 
assertNotNull(issueVmd); + boolean ogIssueVmdUseForPlace = issueVmd.getUseForPlace(); + issueVmd.setUseForPlace(true); + + ////////////////// + // create items // + ////////////////// + + // journal volume 1.1 + Item v1_1 = ItemBuilder.createItem(context, collection) + .withTitle("journal volume 1") + .withMetadata("dspace", "entity", "type", journalVolumeEntityType.getLabel()) + .withMetadata("publicationvolume", "volumeNumber", null, "volume nr 3 (rel)") + .build(); + + // journal issue 1.1 + Item i1_1 = ItemBuilder.createItem(context, collection) + .withTitle("journal issue 1") + .withMetadata("dspace", "entity", "type", journalIssueEntityType.getLabel()) + .withMetadata("publicationissue", "issueNumber", null, "issue nr 1 (rel)") + .build(); + + // journal issue 3.1 + Item i3_1 = ItemBuilder.createItem(context, collection) + .withTitle("journal issue 3") + .withMetadata("dspace", "entity", "type", journalIssueEntityType.getLabel()) + .withMetadata("publicationissue", "issueNumber", null, "issue nr 3 (rel)") + .build(); + + // journal issue 5.1 + Item i5_1 = ItemBuilder.createItem(context, collection) + .withTitle("journal issue 5") + .withMetadata("dspace", "entity", "type", journalIssueEntityType.getLabel()) + .withMetadata("publicationissue", "issueNumber", null, "issue nr 5 (rel)") + .build(); + + ////////////////////////////////////////////// + // create relationships and metadata values // + ////////////////////////////////////////////// + + // relationship - volume 1 & issue 1 + RelationshipBuilder.createRelationshipBuilder(context, v1_1, i1_1, isIssueOfJournalVolume).build(); + + // metadata - volume 1 & issue 2 + itemService.addMetadata(context, v1_1, "publicationissue", "issueNumber", null, null, "issue nr 2 (plain)"); + + // relationship - volume 1 & issue 3 + RelationshipBuilder.createRelationshipBuilder(context, v1_1, i3_1, isIssueOfJournalVolume).build(); + + // metadata - volume 1 & issue 4 + itemService.addMetadata(context, v1_1, 
"publicationissue", "issueNumber", null, null, "issue nr 4 (plain)"); + + // relationship - volume 1 & issue 5 + RelationshipBuilder.createRelationshipBuilder(context, v1_1, i5_1, isIssueOfJournalVolume).build(); + + // metadata - volume 1 & issue 6 + itemService.addMetadata(context, v1_1, "publicationissue", "issueNumber", null, null, "issue nr 6 (plain)"); + + // SUMMARY + // + // volume 3 + // - pos 0: issue 1 (rel) + // - pos 1: issue 2 (plain) + // - pos 2: issue 3 (rel) + // - pos 3: issue 4 (plain) + // - pos 4: issue 5 (rel) + // - pos 5: issue 6 (plain) + + ///////////////////////////////// + // initial - verify volume 3.1 // + ///////////////////////////////// + + List mdvs1 = itemService.getMetadata( + v1_1, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(6, mdvs1.size()); + + assertTrue(mdvs1.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs1.get(0).getValue()); + assertEquals(0, mdvs1.get(0).getPlace()); + + assertFalse(mdvs1.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (plain)", mdvs1.get(1).getValue()); + assertEquals(1, mdvs1.get(1).getPlace()); + + assertTrue(mdvs1.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs1.get(2).getValue()); + assertEquals(2, mdvs1.get(2).getPlace()); + + assertFalse(mdvs1.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (plain)", mdvs1.get(3).getValue()); + assertEquals(3, mdvs1.get(3).getPlace()); + + assertTrue(mdvs1.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs1.get(4).getValue()); + assertEquals(4, mdvs1.get(4).getPlace()); + + assertFalse(mdvs1.get(5) instanceof RelationshipMetadataValue); + assertEquals("issue nr 6 (plain)", mdvs1.get(5).getValue()); + assertEquals(5, mdvs1.get(5).getPlace()); + + ///////////////////////////////////// + // create new version - volume 1.2 // + ///////////////////////////////////// + + Item v1_2 = 
versioningService.createNewVersion(context, v1_1).getItem(); + installItemService.installItem(context, workspaceItemService.findByItem(context, v1_2)); + context.commit(); + + //////////////////////////////////// + // create new version - issue 3.2 // + //////////////////////////////////// + + Item i3_2 = versioningService.createNewVersion(context, i3_1).getItem(); + installItemService.installItem(context, workspaceItemService.findByItem(context, i3_2)); + context.commit(); + + //////////////////////////////////////////////// + // after version creation - verify volume 1.1 // + //////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_1, isIssueOfJournalVolume, i1_1, RIGHT_ONLY, 0, 0), + isRel(v1_1, isIssueOfJournalVolume, i3_1, RIGHT_ONLY, 2, 0), + isRel(v1_1, isIssueOfJournalVolume, i5_1, RIGHT_ONLY, 4, 0) + )) + ); + + List mdvs4 = itemService.getMetadata( + v1_1, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(6, mdvs4.size()); + + assertTrue(mdvs4.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs4.get(0).getValue()); + assertEquals(0, mdvs4.get(0).getPlace()); + + assertFalse(mdvs4.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (plain)", mdvs4.get(1).getValue()); + assertEquals(1, mdvs4.get(1).getPlace()); + + assertTrue(mdvs4.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs4.get(2).getValue()); + assertEquals(2, mdvs4.get(2).getPlace()); + + assertFalse(mdvs4.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (plain)", mdvs4.get(3).getValue()); + assertEquals(3, mdvs4.get(3).getPlace()); + + assertTrue(mdvs4.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs4.get(4).getValue()); + assertEquals(4, mdvs4.get(4).getPlace()); + + assertFalse(mdvs4.get(5) instanceof 
RelationshipMetadataValue); + assertEquals("issue nr 6 (plain)", mdvs4.get(5).getValue()); + assertEquals(5, mdvs4.get(5).getPlace()); + + //////////////////////////////////////////////// + // after version creation - verify volume 1.2 // + //////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_2, isIssueOfJournalVolume, i1_1, BOTH, 0, 0), + isRel(v1_2, isIssueOfJournalVolume, i3_1, LEFT_ONLY, 2, 0), + isRel(v1_2, isIssueOfJournalVolume, i5_1, BOTH, 4, 0), + isRel(v1_2, isIssueOfJournalVolume, i3_2, BOTH, 2, 0) + )) + ); + + List mdvs7 = itemService.getMetadata( + v1_2, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(6, mdvs7.size()); + + assertTrue(mdvs7.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs7.get(0).getValue()); + assertEquals(0, mdvs7.get(0).getPlace()); + + assertFalse(mdvs7.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (plain)", mdvs7.get(1).getValue()); + assertEquals(1, mdvs7.get(1).getPlace()); + + assertTrue(mdvs7.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs7.get(2).getValue()); + assertEquals(2, mdvs7.get(2).getPlace()); + + assertFalse(mdvs7.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (plain)", mdvs7.get(3).getValue()); + assertEquals(3, mdvs7.get(3).getPlace()); + + assertTrue(mdvs7.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs7.get(4).getValue()); + assertEquals(4, mdvs7.get(4).getPlace()); + + assertFalse(mdvs7.get(5) instanceof RelationshipMetadataValue); + assertEquals("issue nr 6 (plain)", mdvs7.get(5).getValue()); + assertEquals(5, mdvs7.get(5).getPlace()); + + /////////////////////////////////////////////////////////// + // remove relationship - volume 1.2 & issue 3.2 // + // since an issue needs a relationship, delete the 
issue // + /////////////////////////////////////////////////////////// + + Relationship rel1 = getRelationship(v1_2, isIssueOfJournalVolume, i3_2); + assertNotNull(rel1); + + itemService.delete(context, context.reloadEntity(i3_2)); + + context.commit(); + + //////////////////////////////////// + // after remove 1 - cache busting // + //////////////////////////////////// + + v1_2.setMetadataModified(); + v1_2 = context.reloadEntity(v1_2); + + //////////////////////////////////////// + // after remove 1 - verify volume 3.1 // + //////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_1, isIssueOfJournalVolume, i1_1, RIGHT_ONLY, 0, 0), + isRel(v1_1, isIssueOfJournalVolume, i3_1, RIGHT_ONLY, 2, 0), + isRel(v1_1, isIssueOfJournalVolume, i5_1, RIGHT_ONLY, 4, 0) + )) + ); + + List mdvs9 = itemService.getMetadata( + v1_1, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(6, mdvs9.size()); + + assertTrue(mdvs9.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs9.get(0).getValue()); + assertEquals(0, mdvs9.get(0).getPlace()); + + assertFalse(mdvs9.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (plain)", mdvs9.get(1).getValue()); + assertEquals(1, mdvs9.get(1).getPlace()); + + assertTrue(mdvs9.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs9.get(2).getValue()); + assertEquals(2, mdvs9.get(2).getPlace()); + + assertFalse(mdvs9.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (plain)", mdvs9.get(3).getValue()); + assertEquals(3, mdvs9.get(3).getPlace()); + + assertTrue(mdvs9.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs9.get(4).getValue()); + assertEquals(4, mdvs9.get(4).getPlace()); + + assertFalse(mdvs9.get(5) instanceof RelationshipMetadataValue); + assertEquals("issue nr 6 (plain)", 
mdvs9.get(5).getValue()); + assertEquals(5, mdvs9.get(5).getPlace()); + + //////////////////////////////////////// + // after remove 1 - verify volume 1.2 // + //////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_2, isIssueOfJournalVolume, i1_1, BOTH, 0, 0), + isRel(v1_2, isIssueOfJournalVolume, i3_1, LEFT_ONLY, 2, 0), + // NOTE: left place was reduced by one + isRel(v1_2, isIssueOfJournalVolume, i5_1, BOTH, 3, 0) + )) + ); + + List mdvs12 = itemService.getMetadata( + v1_2, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(5, mdvs12.size()); + + assertTrue(mdvs12.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs12.get(0).getValue()); + assertEquals(0, mdvs12.get(0).getPlace()); + + assertFalse(mdvs12.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (plain)", mdvs12.get(1).getValue()); + assertEquals(1, mdvs12.get(1).getPlace()); + + assertFalse(mdvs12.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (plain)", mdvs12.get(2).getValue()); + assertEquals(2, mdvs12.get(2).getPlace()); + + assertTrue(mdvs12.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs12.get(3).getValue()); + assertEquals(3, mdvs12.get(3).getPlace()); + + assertFalse(mdvs12.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 6 (plain)", mdvs12.get(4).getValue()); + assertEquals(4, mdvs12.get(4).getPlace()); + + //////////////////////////////////////// + // remove metadata value - volume 1.2 // + //////////////////////////////////////// + + MetadataValue removeMdv1 = mdvs12.get(2); + + // let's make sure we have the metadata value that we intended to remove + assertFalse(removeMdv1 instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (plain)", removeMdv1.getValue()); + assertEquals(2, removeMdv1.getPlace()); + 
assertEquals(v1_2, removeMdv1.getDSpaceObject()); + + itemService.removeMetadataValues(context, v1_2, List.of(removeMdv1)); + // NOTE: after removal, update is required to do place recalculation, among other things + itemService.update(context, v1_2); + context.commit(); + + //////////////////////////////////////// + // after remove 2 - verify volume 1.1 // + //////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_1, isIssueOfJournalVolume, i1_1, RIGHT_ONLY, 0, 0), + isRel(v1_1, isIssueOfJournalVolume, i3_1, RIGHT_ONLY, 2, 0), + isRel(v1_1, isIssueOfJournalVolume, i5_1, RIGHT_ONLY, 4, 0) + )) + ); + + List mdvs14 = itemService.getMetadata( + v1_1, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(6, mdvs14.size()); + + assertTrue(mdvs14.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs14.get(0).getValue()); + assertEquals(0, mdvs14.get(0).getPlace()); + + assertFalse(mdvs14.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (plain)", mdvs14.get(1).getValue()); + assertEquals(1, mdvs14.get(1).getPlace()); + + assertTrue(mdvs14.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs14.get(2).getValue()); + assertEquals(2, mdvs14.get(2).getPlace()); + + assertFalse(mdvs14.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (plain)", mdvs14.get(3).getValue()); + assertEquals(3, mdvs14.get(3).getPlace()); + + assertTrue(mdvs14.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs14.get(4).getValue()); + assertEquals(4, mdvs14.get(4).getPlace()); + + assertFalse(mdvs14.get(5) instanceof RelationshipMetadataValue); + assertEquals("issue nr 6 (plain)", mdvs14.get(5).getValue()); + assertEquals(5, mdvs14.get(5).getPlace()); + + //////////////////////////////////////// + // after remove 2 - verify volume 
1.2 // + //////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_2, isIssueOfJournalVolume, i1_1, BOTH, 0, 0), + isRel(v1_2, isIssueOfJournalVolume, i3_1, LEFT_ONLY, 2, 0), + // NOTE: left place was reduced by one (from 3 to 2) + isRel(v1_2, isIssueOfJournalVolume, i5_1, BOTH, 2, 0) + )) + ); + + List mdvs17 = itemService.getMetadata( + v1_2, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(4, mdvs17.size()); + + assertTrue(mdvs17.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs17.get(0).getValue()); + assertEquals(0, mdvs17.get(0).getPlace()); + + assertFalse(mdvs17.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (plain)", mdvs17.get(1).getValue()); + assertEquals(1, mdvs17.get(1).getPlace()); + + assertTrue(mdvs17.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs17.get(2).getValue()); + assertEquals(2, mdvs17.get(2).getPlace()); + + assertFalse(mdvs17.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 6 (plain)", mdvs17.get(3).getValue()); + assertEquals(3, mdvs17.get(3).getPlace()); + + ///////////////////////////////////////////// + // delete volume first for min cardinality // + ///////////////////////////////////////////// + + itemService.delete(context, context.reloadEntity(v1_1)); + itemService.delete(context, context.reloadEntity(v1_2)); + + ///////////////////////////// + // clean up config changes // + ///////////////////////////// + + issueVmd.setUseForPlace(ogIssueVmdUseForPlace); + } + + @Test + public void test_placeRecalculationAfterDelete_complex() throws Exception { + runWithVirtualMetadataConfig( + () -> { + // config summary: + // on the Project items, metadata field dc.contributor.author will appear with the Authors' titles + // on the Person items, metadata field dc.relation will appear with the 
Projects' titles + + Collected dcRelation = createBean(Collected.class); + dcRelation.setFields(List.of("dc.title")); + dcRelation.setUseForPlace(true); + + Collected dcContributorAuthor = createBean(Collected.class); + dcContributorAuthor.setFields(List.of("dc.title")); + dcContributorAuthor.setUseForPlace(true); + + return Map.of( + "isProjectOfPerson", new HashMap<>(Map.of( + "dc.relation", dcRelation + )), + "isPersonOfProject", new HashMap<>(Map.of( + "dc.contributor.author", dcContributorAuthor + )) + ); + }, + () -> { + ////////////////// + // create items // + ////////////////// + + // person 1.1 + Item pe1_1 = ItemBuilder.createItem(context, collection) + .withTitle("person 1 (item)") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .build(); + + // person 3.1 + Item pe3_1 = ItemBuilder.createItem(context, collection) + .withTitle("person 3 (item)") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .build(); + + // person 5.1 + Item pe5_1 = ItemBuilder.createItem(context, collection) + .withTitle("person 5 (item)") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .build(); + + // project 1.1 + Item pr1_1 = ItemBuilder.createItem(context, collection) + .withTitle("project 1 (item)") + .withMetadata("dspace", "entity", "type", projectEntityType.getLabel()) + .build(); + + // project 3.1 + Item pr3_1 = ItemBuilder.createItem(context, collection) + .withTitle("project 3 (item)") + .withMetadata("dspace", "entity", "type", projectEntityType.getLabel()) + .build(); + + // project 5.1 + Item pr5_1 = ItemBuilder.createItem(context, collection) + .withTitle("project 5 (item)") + .withMetadata("dspace", "entity", "type", projectEntityType.getLabel()) + .build(); + + ////////////////////////////////////////////// + // create relationships and metadata values // + ////////////////////////////////////////////// + + // relationship - person 3 & project 1 + 
RelationshipBuilder.createRelationshipBuilder(context, pe3_1, pr1_1, isProjectOfPerson) + .build(); + + // metadata - person 3 & project 2 + itemService.addMetadata(context, pe3_1, "dc", "relation", null, null, "project 2 (mdv)"); + + // relationship - person 1 & project 3 + RelationshipBuilder.createRelationshipBuilder(context, pe1_1, pr3_1, isProjectOfPerson) + .build(); + + // metadata - person 2 & project 3 + itemService.addMetadata(context, pr3_1, "dc", "contributor", "author", null, "person 2 (mdv)"); + + // relationship - person 3 & project 3 + RelationshipBuilder.createRelationshipBuilder(context, pe3_1, pr3_1, isProjectOfPerson) + .build(); + + // metadata - person 4 & project 3 + itemService.addMetadata(context, pr3_1, "dc", "contributor", "author", null, "person 4 (mdv)"); + + // relationship - person 5 & project 3 + RelationshipBuilder.createRelationshipBuilder(context, pe5_1, pr3_1, isProjectOfPerson) + .build(); + + // metadata - person 6 & project 3 + itemService.addMetadata(context, pr3_1, "dc", "contributor", "author", null, "person 6 (mdv)"); + + // metadata - person 7 & project 5 + itemService.addMetadata(context, pr5_1, "dc", "contributor", "author", null, "person 7 (mdv)"); + + // relationship - person 5 & project 5 + RelationshipBuilder.createRelationshipBuilder(context, pe5_1, pr5_1, isProjectOfPerson) + .build(); + + // metadata - person 3 & project 4 + itemService.addMetadata(context, pe3_1, "dc", "relation", null, null, "project 4 (mdv)"); + + // relationship - person 3 & project 5 + RelationshipBuilder.createRelationshipBuilder(context, pe3_1, pr5_1, isProjectOfPerson) + .build(); + + // metadata - person 3 & project 6 + itemService.addMetadata(context, pe3_1, "dc", "relation", null, null, "project 6 (mdv)"); + + // SUMMARY + // + // person 3 + // - pos 0: project 1 (item) + // - pos 1: project 2 (mdv) + // - pos 2: project 3 (item) [A] + // - pos 3: project 4 (mdv) + // - pos 4: project 5 (item) [B] + // - pos 5: project 6 (mdv) + // + 
// project 3 + // - pos 0: person 1 (item) + // - pos 1: person 2 (mdv) + // - pos 2: person 3 (item) [A] + // - pos 3: person 4 (mdv) + // - pos 4: person 5 (item) + // - pos 5: person 6 (mdv) + // + // project 5 + // - pos 0: person 7 (mdv) + // - pos 1: person 5 (item) + // - pos 2: person 3 (item) [B] + + ///////////////////////////////// + // initial - verify person 3.1 // + ///////////////////////////////// + + List mdvs1 = itemService.getMetadata( + pe3_1, "dc", "relation", null, Item.ANY + ); + assertEquals(6, mdvs1.size()); + + assertTrue(mdvs1.get(0) instanceof RelationshipMetadataValue); + assertEquals("project 1 (item)", mdvs1.get(0).getValue()); + assertEquals(0, mdvs1.get(0).getPlace()); + + assertFalse(mdvs1.get(1) instanceof RelationshipMetadataValue); + assertEquals("project 2 (mdv)", mdvs1.get(1).getValue()); + assertEquals(1, mdvs1.get(1).getPlace()); + + assertTrue(mdvs1.get(2) instanceof RelationshipMetadataValue); + assertEquals("project 3 (item)", mdvs1.get(2).getValue()); + assertEquals(2, mdvs1.get(2).getPlace()); + + assertFalse(mdvs1.get(3) instanceof RelationshipMetadataValue); + assertEquals("project 4 (mdv)", mdvs1.get(3).getValue()); + assertEquals(3, mdvs1.get(3).getPlace()); + + assertTrue(mdvs1.get(4) instanceof RelationshipMetadataValue); + assertEquals("project 5 (item)", mdvs1.get(4).getValue()); + assertEquals(4, mdvs1.get(4).getPlace()); + + assertFalse(mdvs1.get(5) instanceof RelationshipMetadataValue); + assertEquals("project 6 (mdv)", mdvs1.get(5).getValue()); + assertEquals(5, mdvs1.get(5).getPlace()); + + ////////////////////////////////// + // initial - verify project 3.1 // + ////////////////////////////////// + + List mdvs2 = itemService.getMetadata( + pr3_1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(6, mdvs2.size()); + + assertTrue(mdvs2.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 1 (item)", mdvs2.get(0).getValue()); + assertEquals(0, mdvs2.get(0).getPlace()); + + 
assertFalse(mdvs2.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 2 (mdv)", mdvs2.get(1).getValue()); + assertEquals(1, mdvs2.get(1).getPlace()); + + assertTrue(mdvs2.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 3 (item)", mdvs2.get(2).getValue()); + assertEquals(2, mdvs2.get(2).getPlace()); + + assertFalse(mdvs2.get(3) instanceof RelationshipMetadataValue); + assertEquals("person 4 (mdv)", mdvs2.get(3).getValue()); + assertEquals(3, mdvs2.get(3).getPlace()); + + assertTrue(mdvs2.get(4) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs2.get(4).getValue()); + assertEquals(4, mdvs2.get(4).getPlace()); + + assertFalse(mdvs2.get(5) instanceof RelationshipMetadataValue); + assertEquals("person 6 (mdv)", mdvs2.get(5).getValue()); + assertEquals(5, mdvs2.get(5).getPlace()); + + ////////////////////////////////// + // initial - verify project 5.1 // + ////////////////////////////////// + + List mdvs3 = itemService.getMetadata( + pr5_1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(3, mdvs3.size()); + + assertFalse(mdvs3.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 7 (mdv)", mdvs3.get(0).getValue()); + assertEquals(0, mdvs3.get(0).getPlace()); + + assertTrue(mdvs3.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs3.get(1).getValue()); + assertEquals(1, mdvs3.get(1).getPlace()); + + assertTrue(mdvs3.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 3 (item)", mdvs3.get(2).getValue()); + assertEquals(2, mdvs3.get(2).getPlace()); + + ///////////////////////////////////// + // create new version - person 3.2 // + ///////////////////////////////////// + + Item pe3_2 = versioningService.createNewVersion(context, pe3_1).getItem(); + installItemService.installItem(context, workspaceItemService.findByItem(context, pe3_2)); + context.commit(); + + ////////////////////////////////////// + // create new version - project 3.2 
// + ////////////////////////////////////// + + Item pr3_2 = versioningService.createNewVersion(context, pr3_1).getItem(); + installItemService.installItem(context, workspaceItemService.findByItem(context, pr3_2)); + context.commit(); + + //////////////////////////////////////////////// + // after version creation - verify person 3.1 // + //////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pe3_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe3_1, isProjectOfPerson, pr1_1, RIGHT_ONLY, 0, 0), + isRel(pe3_1, isProjectOfPerson, pr3_1, RIGHT_ONLY, 2, 2), + isRel(pe3_1, isProjectOfPerson, pr5_1, RIGHT_ONLY, 4, 2) + )) + ); + + List mdvs4 = itemService.getMetadata( + pe3_1, "dc", "relation", null, Item.ANY + ); + assertEquals(6, mdvs4.size()); + + assertTrue(mdvs4.get(0) instanceof RelationshipMetadataValue); + assertEquals("project 1 (item)", mdvs4.get(0).getValue()); + assertEquals(0, mdvs4.get(0).getPlace()); + + assertFalse(mdvs4.get(1) instanceof RelationshipMetadataValue); + assertEquals("project 2 (mdv)", mdvs4.get(1).getValue()); + assertEquals(1, mdvs4.get(1).getPlace()); + + assertTrue(mdvs4.get(2) instanceof RelationshipMetadataValue); + assertEquals("project 3 (item)", mdvs4.get(2).getValue()); + assertEquals(2, mdvs4.get(2).getPlace()); + + assertFalse(mdvs4.get(3) instanceof RelationshipMetadataValue); + assertEquals("project 4 (mdv)", mdvs4.get(3).getValue()); + assertEquals(3, mdvs4.get(3).getPlace()); + + assertTrue(mdvs4.get(4) instanceof RelationshipMetadataValue); + assertEquals("project 5 (item)", mdvs4.get(4).getValue()); + assertEquals(4, mdvs4.get(4).getPlace()); + + assertFalse(mdvs4.get(5) instanceof RelationshipMetadataValue); + assertEquals("project 6 (mdv)", mdvs4.get(5).getValue()); + assertEquals(5, mdvs4.get(5).getPlace()); + + ///////////////////////////////////////////////// + // after version creation - verify project 3.1 // + 
///////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pr3_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe1_1, isProjectOfPerson, pr3_1, LEFT_ONLY, 0, 0), + isRel(pe3_1, isProjectOfPerson, pr3_1, RIGHT_ONLY, 2, 2), + isRel(pe5_1, isProjectOfPerson, pr3_1, LEFT_ONLY, 0, 4), + isRel(pe3_2, isProjectOfPerson, pr3_1, LEFT_ONLY, 2, 2) + )) + ); + + List mdvs5 = itemService.getMetadata( + pr3_1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(6, mdvs5.size()); + + assertTrue(mdvs5.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 1 (item)", mdvs5.get(0).getValue()); + assertEquals(0, mdvs5.get(0).getPlace()); + + assertFalse(mdvs5.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 2 (mdv)", mdvs5.get(1).getValue()); + assertEquals(1, mdvs5.get(1).getPlace()); + + assertTrue(mdvs5.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 3 (item)", mdvs5.get(2).getValue()); + assertEquals(2, mdvs5.get(2).getPlace()); + + assertFalse(mdvs5.get(3) instanceof RelationshipMetadataValue); + assertEquals("person 4 (mdv)", mdvs5.get(3).getValue()); + assertEquals(3, mdvs5.get(3).getPlace()); + + assertTrue(mdvs5.get(4) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs5.get(4).getValue()); + assertEquals(4, mdvs5.get(4).getPlace()); + + assertFalse(mdvs5.get(5) instanceof RelationshipMetadataValue); + assertEquals("person 6 (mdv)", mdvs5.get(5).getValue()); + assertEquals(5, mdvs5.get(5).getPlace()); + + ///////////////////////////////////////////////// + // after version creation - verify project 5.1 // + ///////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pr5_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe5_1, isProjectOfPerson, pr5_1, BOTH, 1, 1), + isRel(pe3_1, isProjectOfPerson, pr5_1, RIGHT_ONLY, 4, 2), + isRel(pe3_2, isProjectOfPerson, pr5_1, 
BOTH, 4, 2) + )) + ); + + List mdvs6 = itemService.getMetadata( + pr5_1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(3, mdvs6.size()); + + assertFalse(mdvs6.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 7 (mdv)", mdvs6.get(0).getValue()); + assertEquals(0, mdvs6.get(0).getPlace()); + + assertTrue(mdvs6.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs6.get(1).getValue()); + assertEquals(1, mdvs6.get(1).getPlace()); + + assertTrue(mdvs6.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 3 (item)", mdvs6.get(2).getValue()); + assertEquals(2, mdvs6.get(2).getPlace()); + + //////////////////////////////////////////////// + // after version creation - verify volume 3.2 // + //////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pe3_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe3_2, isProjectOfPerson, pr1_1, BOTH, 0, 0), + isRel(pe3_2, isProjectOfPerson, pr3_1, LEFT_ONLY, 2, 2), + isRel(pe3_2, isProjectOfPerson, pr3_2, BOTH, 2, 2), + isRel(pe3_2, isProjectOfPerson, pr5_1, BOTH, 4, 2) + )) + ); + + List mdvs7 = itemService.getMetadata( + pe3_2, "dc", "relation", null, Item.ANY + ); + assertEquals(6, mdvs7.size()); + + assertTrue(mdvs7.get(0) instanceof RelationshipMetadataValue); + assertEquals("project 1 (item)", mdvs7.get(0).getValue()); + assertEquals(0, mdvs7.get(0).getPlace()); + + assertFalse(mdvs7.get(1) instanceof RelationshipMetadataValue); + assertEquals("project 2 (mdv)", mdvs7.get(1).getValue()); + assertEquals(1, mdvs7.get(1).getPlace()); + + assertTrue(mdvs7.get(2) instanceof RelationshipMetadataValue); + assertEquals("project 3 (item)", mdvs7.get(2).getValue()); + assertEquals(2, mdvs7.get(2).getPlace()); + + assertFalse(mdvs7.get(3) instanceof RelationshipMetadataValue); + assertEquals("project 4 (mdv)", mdvs7.get(3).getValue()); + assertEquals(3, mdvs7.get(3).getPlace()); + + 
assertTrue(mdvs7.get(4) instanceof RelationshipMetadataValue); + assertEquals("project 5 (item)", mdvs7.get(4).getValue()); + assertEquals(4, mdvs7.get(4).getPlace()); + + assertFalse(mdvs7.get(5) instanceof RelationshipMetadataValue); + assertEquals("project 6 (mdv)", mdvs7.get(5).getValue()); + assertEquals(5, mdvs7.get(5).getPlace()); + + ///////////////////////////////////////////////// + // after version creation - verify project 3.2 // + ///////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pr3_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe1_1, isProjectOfPerson, pr3_2, BOTH, 0, 0), + isRel(pe5_1, isProjectOfPerson, pr3_2, BOTH, 0, 4), + isRel(pe3_2, isProjectOfPerson, pr3_2, BOTH, 2, 2) + )) + ); + + List mdvs8 = itemService.getMetadata( + pr3_2, "dc", "contributor", "author", Item.ANY + ); + assertEquals(6, mdvs8.size()); + + assertTrue(mdvs8.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 1 (item)", mdvs8.get(0).getValue()); + assertEquals(0, mdvs8.get(0).getPlace()); + + assertFalse(mdvs8.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 2 (mdv)", mdvs8.get(1).getValue()); + assertEquals(1, mdvs8.get(1).getPlace()); + + assertTrue(mdvs8.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 3 (item)", mdvs8.get(2).getValue()); + assertEquals(2, mdvs8.get(2).getPlace()); + + assertFalse(mdvs8.get(3) instanceof RelationshipMetadataValue); + assertEquals("person 4 (mdv)", mdvs8.get(3).getValue()); + assertEquals(3, mdvs8.get(3).getPlace()); + + assertTrue(mdvs8.get(4) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs8.get(4).getValue()); + assertEquals(4, mdvs8.get(4).getPlace()); + + assertFalse(mdvs8.get(5) instanceof RelationshipMetadataValue); + assertEquals("person 6 (mdv)", mdvs8.get(5).getValue()); + assertEquals(5, mdvs8.get(5).getPlace()); + + //////////////////////////////////////////////////// 
+ // remove relationship - person 3.2 & project 3.2 // + //////////////////////////////////////////////////// + + Relationship rel1 = getRelationship(pe3_2, isProjectOfPerson, pr3_2); + assertNotNull(rel1); + + relationshipService.delete(context, rel1, false, false); + context.commit(); + + //////////////////////////////////// + // after remove 1 - cache busting // + //////////////////////////////////// + + pe3_2.setMetadataModified(); + pe3_2 = context.reloadEntity(pe3_2); + + pr3_2.setMetadataModified(); + pr3_2 = context.reloadEntity(pr3_2); + + //////////////////////////////////////// + // after remove 1 - verify person 3.1 // + //////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pe3_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe3_1, isProjectOfPerson, pr1_1, RIGHT_ONLY, 0, 0), + isRel(pe3_1, isProjectOfPerson, pr3_1, RIGHT_ONLY, 2, 2), + isRel(pe3_1, isProjectOfPerson, pr5_1, RIGHT_ONLY, 4, 2) + )) + ); + + List mdvs9 = itemService.getMetadata( + pe3_1, "dc", "relation", null, Item.ANY + ); + assertEquals(6, mdvs9.size()); + + assertTrue(mdvs9.get(0) instanceof RelationshipMetadataValue); + assertEquals("project 1 (item)", mdvs9.get(0).getValue()); + assertEquals(0, mdvs9.get(0).getPlace()); + + assertFalse(mdvs9.get(1) instanceof RelationshipMetadataValue); + assertEquals("project 2 (mdv)", mdvs9.get(1).getValue()); + assertEquals(1, mdvs9.get(1).getPlace()); + + assertTrue(mdvs9.get(2) instanceof RelationshipMetadataValue); + assertEquals("project 3 (item)", mdvs9.get(2).getValue()); + assertEquals(2, mdvs9.get(2).getPlace()); + + assertFalse(mdvs9.get(3) instanceof RelationshipMetadataValue); + assertEquals("project 4 (mdv)", mdvs9.get(3).getValue()); + assertEquals(3, mdvs9.get(3).getPlace()); + + assertTrue(mdvs9.get(4) instanceof RelationshipMetadataValue); + assertEquals("project 5 (item)", mdvs9.get(4).getValue()); + assertEquals(4, mdvs9.get(4).getPlace()); + + assertFalse(mdvs9.get(5) 
instanceof RelationshipMetadataValue); + assertEquals("project 6 (mdv)", mdvs9.get(5).getValue()); + assertEquals(5, mdvs9.get(5).getPlace()); + + ///////////////////////////////////////// + // after remove 1 - verify project 3.1 // + ///////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pr3_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe1_1, isProjectOfPerson, pr3_1, LEFT_ONLY, 0, 0), + isRel(pe3_1, isProjectOfPerson, pr3_1, RIGHT_ONLY, 2, 2), + isRel(pe3_2, isProjectOfPerson, pr3_1, LEFT_ONLY, 2, 2), + isRel(pe5_1, isProjectOfPerson, pr3_1, LEFT_ONLY, 0, 4) + )) + ); + + List mdvs10 = itemService.getMetadata( + pr3_1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(6, mdvs10.size()); + + assertTrue(mdvs10.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 1 (item)", mdvs10.get(0).getValue()); + assertEquals(0, mdvs10.get(0).getPlace()); + + assertFalse(mdvs10.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 2 (mdv)", mdvs10.get(1).getValue()); + assertEquals(1, mdvs10.get(1).getPlace()); + + assertTrue(mdvs10.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 3 (item)", mdvs10.get(2).getValue()); + assertEquals(2, mdvs10.get(2).getPlace()); + + assertFalse(mdvs10.get(3) instanceof RelationshipMetadataValue); + assertEquals("person 4 (mdv)", mdvs10.get(3).getValue()); + assertEquals(3, mdvs10.get(3).getPlace()); + + assertTrue(mdvs10.get(4) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs10.get(4).getValue()); + assertEquals(4, mdvs10.get(4).getPlace()); + + assertFalse(mdvs10.get(5) instanceof RelationshipMetadataValue); + assertEquals("person 6 (mdv)", mdvs10.get(5).getValue()); + assertEquals(5, mdvs10.get(5).getPlace()); + + ///////////////////////////////////////// + // after remove 1 - verify project 5.1 // + ///////////////////////////////////////// + + assertThat( + 
relationshipService.findByItem(context, pr5_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe5_1, isProjectOfPerson, pr5_1, BOTH, 1, 1), + isRel(pe3_1, isProjectOfPerson, pr5_1, RIGHT_ONLY, 4, 2), + // NOTE: left place was reduced by one + isRel(pe3_2, isProjectOfPerson, pr5_1, BOTH, 3, 2) + )) + ); + + List mdvs11 = itemService.getMetadata( + pr5_1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(3, mdvs11.size()); + + assertFalse(mdvs11.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 7 (mdv)", mdvs11.get(0).getValue()); + assertEquals(0, mdvs11.get(0).getPlace()); + + assertTrue(mdvs11.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs11.get(1).getValue()); + assertEquals(1, mdvs11.get(1).getPlace()); + + assertTrue(mdvs11.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 3 (item)", mdvs11.get(2).getValue()); + assertEquals(2, mdvs11.get(2).getPlace()); + + //////////////////////////////////////// + // after remove 1 - verify person 3.2 // + //////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pe3_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe3_2, isProjectOfPerson, pr1_1, BOTH, 0, 0), + isRel(pe3_2, isProjectOfPerson, pr3_1, LEFT_ONLY, 2, 2), + // NOTE: left place was reduced by one (from 4 to 3) + isRel(pe3_2, isProjectOfPerson, pr5_1, BOTH, 3, 2) + )) + ); + + List mdvs12 = itemService.getMetadata( + pe3_2, "dc", "relation", null, Item.ANY + ); + assertEquals(5, mdvs12.size()); + + assertTrue(mdvs12.get(0) instanceof RelationshipMetadataValue); + assertEquals("project 1 (item)", mdvs12.get(0).getValue()); + assertEquals(0, mdvs12.get(0).getPlace()); + + assertFalse(mdvs12.get(1) instanceof RelationshipMetadataValue); + assertEquals("project 2 (mdv)", mdvs12.get(1).getValue()); + assertEquals(1, mdvs12.get(1).getPlace()); + + assertFalse(mdvs12.get(2) instanceof RelationshipMetadataValue); + 
assertEquals("project 4 (mdv)", mdvs12.get(2).getValue()); + assertEquals(2, mdvs12.get(2).getPlace()); + + assertTrue(mdvs12.get(3) instanceof RelationshipMetadataValue); + assertEquals("project 5 (item)", mdvs12.get(3).getValue()); + assertEquals(3, mdvs12.get(3).getPlace()); + + assertFalse(mdvs12.get(4) instanceof RelationshipMetadataValue); + assertEquals("project 6 (mdv)", mdvs12.get(4).getValue()); + assertEquals(4, mdvs12.get(4).getPlace()); + + ///////////////////////////////////////// + // after remove 1 - verify project 3.2 // + ///////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pr3_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe1_1, isProjectOfPerson, pr3_2, BOTH, 0, 0), + // NOTE: right place was reduced by one (from 4 to 3) + isRel(pe5_1, isProjectOfPerson, pr3_2, BOTH, 0, 3) + )) + ); + + List mdvs13 = itemService.getMetadata( + pr3_2, "dc", "contributor", "author", Item.ANY + ); + assertEquals(5, mdvs13.size()); + + assertTrue(mdvs13.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 1 (item)", mdvs13.get(0).getValue()); + assertEquals(0, mdvs13.get(0).getPlace()); + + assertFalse(mdvs13.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 2 (mdv)", mdvs13.get(1).getValue()); + assertEquals(1, mdvs13.get(1).getPlace()); + + assertFalse(mdvs13.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 4 (mdv)", mdvs13.get(2).getValue()); + assertEquals(2, mdvs13.get(2).getPlace()); + + assertTrue(mdvs13.get(3) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs13.get(3).getValue()); + assertEquals(3, mdvs13.get(3).getPlace()); + + assertFalse(mdvs13.get(4) instanceof RelationshipMetadataValue); + assertEquals("person 6 (mdv)", mdvs13.get(4).getValue()); + assertEquals(4, mdvs13.get(4).getPlace()); + + //////////////////////////////////////// + // remove metadata value - person 3.2 // + 
//////////////////////////////////////// + + MetadataValue removeMdv1 = mdvs12.get(2); + + // let's make sure we have the metadata value that we intended to remove + assertFalse(removeMdv1 instanceof RelationshipMetadataValue); + assertEquals("project 4 (mdv)", removeMdv1.getValue()); + assertEquals(2, removeMdv1.getPlace()); + assertEquals(pe3_2, removeMdv1.getDSpaceObject()); + + itemService.removeMetadataValues(context, pe3_2, List.of(removeMdv1)); + itemService.update(context, pe3_2); + context.commit(); + + //////////////////////////////////////// + // after remove 2 - verify person 3.1 // + //////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pe3_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe3_1, isProjectOfPerson, pr1_1, RIGHT_ONLY, 0, 0), + isRel(pe3_1, isProjectOfPerson, pr3_1, RIGHT_ONLY, 2, 2), + isRel(pe3_1, isProjectOfPerson, pr5_1, RIGHT_ONLY, 4, 2) + )) + ); + + List mdvs14 = itemService.getMetadata( + pe3_1, "dc", "relation", null, Item.ANY + ); + assertEquals(6, mdvs14.size()); + + assertTrue(mdvs14.get(0) instanceof RelationshipMetadataValue); + assertEquals("project 1 (item)", mdvs14.get(0).getValue()); + assertEquals(0, mdvs14.get(0).getPlace()); + + assertFalse(mdvs14.get(1) instanceof RelationshipMetadataValue); + assertEquals("project 2 (mdv)", mdvs14.get(1).getValue()); + assertEquals(1, mdvs14.get(1).getPlace()); + + assertTrue(mdvs14.get(2) instanceof RelationshipMetadataValue); + assertEquals("project 3 (item)", mdvs14.get(2).getValue()); + assertEquals(2, mdvs14.get(2).getPlace()); + + assertFalse(mdvs14.get(3) instanceof RelationshipMetadataValue); + assertEquals("project 4 (mdv)", mdvs14.get(3).getValue()); + assertEquals(3, mdvs14.get(3).getPlace()); + + assertTrue(mdvs14.get(4) instanceof RelationshipMetadataValue); + assertEquals("project 5 (item)", mdvs14.get(4).getValue()); + assertEquals(4, mdvs14.get(4).getPlace()); + + assertFalse(mdvs14.get(5) instanceof 
RelationshipMetadataValue); + assertEquals("project 6 (mdv)", mdvs14.get(5).getValue()); + assertEquals(5, mdvs14.get(5).getPlace()); + + ///////////////////////////////////////// + // after remove 2 - verify project 3.1 // + ///////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pr3_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe1_1, isProjectOfPerson, pr3_1, LEFT_ONLY, 0, 0), + isRel(pe3_1, isProjectOfPerson, pr3_1, RIGHT_ONLY, 2, 2), + isRel(pe3_2, isProjectOfPerson, pr3_1, LEFT_ONLY, 2, 2), + isRel(pe5_1, isProjectOfPerson, pr3_1, LEFT_ONLY, 0, 4) + )) + ); + + List mdvs15 = itemService.getMetadata( + pr3_1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(6, mdvs15.size()); + + assertTrue(mdvs15.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 1 (item)", mdvs15.get(0).getValue()); + assertEquals(0, mdvs15.get(0).getPlace()); + + assertFalse(mdvs15.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 2 (mdv)", mdvs15.get(1).getValue()); + assertEquals(1, mdvs15.get(1).getPlace()); + + assertTrue(mdvs15.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 3 (item)", mdvs15.get(2).getValue()); + assertEquals(2, mdvs15.get(2).getPlace()); + + assertFalse(mdvs15.get(3) instanceof RelationshipMetadataValue); + assertEquals("person 4 (mdv)", mdvs15.get(3).getValue()); + assertEquals(3, mdvs15.get(3).getPlace()); + + assertTrue(mdvs15.get(4) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs15.get(4).getValue()); + assertEquals(4, mdvs15.get(4).getPlace()); + + assertFalse(mdvs15.get(5) instanceof RelationshipMetadataValue); + assertEquals("person 6 (mdv)", mdvs15.get(5).getValue()); + assertEquals(5, mdvs15.get(5).getPlace()); + + ///////////////////////////////////////// + // after remove 2 - verify project 5.1 // + ///////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, 
pr5_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe5_1, isProjectOfPerson, pr5_1, BOTH, 1, 1), + isRel(pe3_1, isProjectOfPerson, pr5_1, RIGHT_ONLY, 4, 2), + // NOTE: left place was reduced by one + isRel(pe3_2, isProjectOfPerson, pr5_1, BOTH, 2, 2) + )) + ); + + List mdvs16 = itemService.getMetadata( + pr5_1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(3, mdvs16.size()); + + assertFalse(mdvs16.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 7 (mdv)", mdvs16.get(0).getValue()); + assertEquals(0, mdvs16.get(0).getPlace()); + + assertTrue(mdvs16.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs16.get(1).getValue()); + assertEquals(1, mdvs16.get(1).getPlace()); + + assertTrue(mdvs16.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 3 (item)", mdvs16.get(2).getValue()); + assertEquals(2, mdvs16.get(2).getPlace()); + + //////////////////////////////////////// + // after remove 2 - verify person 3.2 // + //////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pe3_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe3_2, isProjectOfPerson, pr1_1, BOTH, 0, 0), + isRel(pe3_2, isProjectOfPerson, pr3_1, LEFT_ONLY, 2, 2), + // NOTE: left place was reduced by one (from 3 to 2) + isRel(pe3_2, isProjectOfPerson, pr5_1, BOTH, 2, 2) + )) + ); + + List mdvs17 = itemService.getMetadata( + pe3_2, "dc", "relation", null, Item.ANY + ); + assertEquals(4, mdvs17.size()); + + assertTrue(mdvs17.get(0) instanceof RelationshipMetadataValue); + assertEquals("project 1 (item)", mdvs17.get(0).getValue()); + assertEquals(0, mdvs17.get(0).getPlace()); + + assertFalse(mdvs17.get(1) instanceof RelationshipMetadataValue); + assertEquals("project 2 (mdv)", mdvs17.get(1).getValue()); + assertEquals(1, mdvs17.get(1).getPlace()); + + assertTrue(mdvs17.get(2) instanceof RelationshipMetadataValue); + assertEquals("project 5 (item)", 
mdvs17.get(2).getValue()); + assertEquals(2, mdvs17.get(2).getPlace()); + + assertFalse(mdvs17.get(3) instanceof RelationshipMetadataValue); + assertEquals("project 6 (mdv)", mdvs17.get(3).getValue()); + assertEquals(3, mdvs17.get(3).getPlace()); + + ///////////////////////////////////////// + // after remove 2 - verify project 3.2 // + ///////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pr3_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe1_1, isProjectOfPerson, pr3_2, BOTH, 0, 0), + isRel(pe5_1, isProjectOfPerson, pr3_2, BOTH, 0, 3) + )) + ); + + List mdvs18 = itemService.getMetadata( + pr3_2, "dc", "contributor", "author", Item.ANY + ); + assertEquals(5, mdvs18.size()); + + assertTrue(mdvs18.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 1 (item)", mdvs18.get(0).getValue()); + assertEquals(0, mdvs18.get(0).getPlace()); + + assertFalse(mdvs18.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 2 (mdv)", mdvs18.get(1).getValue()); + assertEquals(1, mdvs18.get(1).getPlace()); + + assertFalse(mdvs18.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 4 (mdv)", mdvs18.get(2).getValue()); + assertEquals(2, mdvs18.get(2).getPlace()); + + assertTrue(mdvs18.get(3) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs18.get(3).getValue()); + assertEquals(3, mdvs18.get(3).getPlace()); + + assertFalse(mdvs18.get(4) instanceof RelationshipMetadataValue); + assertEquals("person 6 (mdv)", mdvs18.get(4).getValue()); + assertEquals(4, mdvs18.get(4).getPlace()); + } + ); + } + + @Test + public void test_placeRecalculationNoUseForPlace() throws Exception { + // NOTE: this test uses relationship isIssueOfJournalVolume, because it adds virtual metadata + // on both sides of the relationship + + ////////////////// + // create items // + ////////////////// + + // journal volume 1.1 + Item v1_1 = ItemBuilder.createItem(context, collection) + 
.withTitle("journal volume 1") + .withMetadata("dspace", "entity", "type", journalVolumeEntityType.getLabel()) + .withMetadata("publicationvolume", "volumeNumber", null, "volume nr 1 (rel)") + .build(); + + // journal issue 1.1 + Item i1_1 = ItemBuilder.createItem(context, collection) + .withTitle("journal issue 1") + .withMetadata("dspace", "entity", "type", journalIssueEntityType.getLabel()) + .withMetadata("publicationissue", "issueNumber", null, "issue nr 1 (rel)") + .build(); + + // journal issue 2.1 + Item i2_1 = ItemBuilder.createItem(context, collection) + .withTitle("journal issue 2") + .withMetadata("dspace", "entity", "type", journalIssueEntityType.getLabel()) + .withMetadata("publicationissue", "issueNumber", null, "issue nr 2 (rel)") + .build(); + + // journal issue 3.1 + Item i3_1 = ItemBuilder.createItem(context, collection) + .withTitle("journal issue 3") + .withMetadata("dspace", "entity", "type", journalIssueEntityType.getLabel()) + .withMetadata("publicationissue", "issueNumber", null, "issue nr 3 (rel)") + .build(); + + // journal issue 4.1 + Item i4_1 = ItemBuilder.createItem(context, collection) + .withTitle("journal issue 4") + .withMetadata("dspace", "entity", "type", journalIssueEntityType.getLabel()) + .withMetadata("publicationissue", "issueNumber", null, "issue nr 4 (rel)") + .build(); + + // journal issue 5.1 + Item i5_1 = ItemBuilder.createItem(context, collection) + .withTitle("journal issue 5") + .withMetadata("dspace", "entity", "type", journalIssueEntityType.getLabel()) + .withMetadata("publicationissue", "issueNumber", null, "issue nr 5 (rel)") + .build(); + + ////////////////////////////////////////////// + // create relationships and metadata values // + ////////////////////////////////////////////// + + // relationship - volume 1 & issue 1 + RelationshipBuilder.createRelationshipBuilder(context, v1_1, i1_1, isIssueOfJournalVolume) + .build(); + + // relationship - volume 1 & issue 2 + 
RelationshipBuilder.createRelationshipBuilder(context, v1_1, i2_1, isIssueOfJournalVolume) + .build(); + + // relationship - volume 1 & issue 3 + RelationshipBuilder.createRelationshipBuilder(context, v1_1, i3_1, isIssueOfJournalVolume) + .build(); + + // relationship - volume 1 & issue 4 + RelationshipBuilder.createRelationshipBuilder(context, v1_1, i4_1, isIssueOfJournalVolume) + .build(); + + // relationship - volume 1 & issue 5 + RelationshipBuilder.createRelationshipBuilder(context, v1_1, i5_1, isIssueOfJournalVolume) + .build(); + + ///////////////////////////////// + // initial - verify volume 3.1 // + ///////////////////////////////// + + List mdvs1 = itemService.getMetadata( + v1_1, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(5, mdvs1.size()); + + assertTrue(mdvs1.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs1.get(0).getValue()); + assertEquals(0, mdvs1.get(0).getPlace()); + + assertTrue(mdvs1.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (rel)", mdvs1.get(1).getValue()); + assertEquals(1, mdvs1.get(1).getPlace()); + + assertTrue(mdvs1.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs1.get(2).getValue()); + assertEquals(2, mdvs1.get(2).getPlace()); + + assertTrue(mdvs1.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (rel)", mdvs1.get(3).getValue()); + assertEquals(3, mdvs1.get(3).getPlace()); + + assertTrue(mdvs1.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs1.get(4).getValue()); + assertEquals(4, mdvs1.get(4).getPlace()); + + ///////////////////////////////////// + // create new version - volume 1.2 // + ///////////////////////////////////// + + Item v1_2 = versioningService.createNewVersion(context, v1_1).getItem(); + installItemService.installItem(context, workspaceItemService.findByItem(context, v1_2)); + context.commit(); + + //////////////////////////////////// 
+ // create new version - issue 3.2 // + //////////////////////////////////// + + Item i3_2 = versioningService.createNewVersion(context, i3_1).getItem(); + installItemService.installItem(context, workspaceItemService.findByItem(context, i3_2)); + context.commit(); + + //////////////////////////////////////////////// + // after version creation - verify volume 3.1 // + //////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_1, isIssueOfJournalVolume, i1_1, RIGHT_ONLY, 0, 0), + isRel(v1_1, isIssueOfJournalVolume, i2_1, RIGHT_ONLY, 1, 0), + isRel(v1_1, isIssueOfJournalVolume, i3_1, RIGHT_ONLY, 2, 0), + isRel(v1_1, isIssueOfJournalVolume, i4_1, RIGHT_ONLY, 3, 0), + isRel(v1_1, isIssueOfJournalVolume, i5_1, RIGHT_ONLY, 4, 0) + )) + ); + + List mdvs4 = itemService.getMetadata( + v1_1, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(5, mdvs4.size()); + + assertTrue(mdvs4.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs4.get(0).getValue()); + assertEquals(0, mdvs4.get(0).getPlace()); + + assertTrue(mdvs4.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (rel)", mdvs4.get(1).getValue()); + assertEquals(1, mdvs4.get(1).getPlace()); + + assertTrue(mdvs4.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs4.get(2).getValue()); + assertEquals(2, mdvs4.get(2).getPlace()); + + assertTrue(mdvs4.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (rel)", mdvs4.get(3).getValue()); + assertEquals(3, mdvs4.get(3).getPlace()); + + assertTrue(mdvs4.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs4.get(4).getValue()); + assertEquals(4, mdvs4.get(4).getPlace()); + + //////////////////////////////////////////////// + // after version creation - verify volume 1.2 // + 
//////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_2, isIssueOfJournalVolume, i1_1, BOTH, 0, 0), + isRel(v1_2, isIssueOfJournalVolume, i2_1, BOTH, 1, 0), + isRel(v1_2, isIssueOfJournalVolume, i3_1, LEFT_ONLY, 2, 0), + isRel(v1_2, isIssueOfJournalVolume, i3_2, BOTH, 2, 0), + isRel(v1_2, isIssueOfJournalVolume, i4_1, BOTH, 3, 0), + isRel(v1_2, isIssueOfJournalVolume, i5_1, BOTH, 4, 0) + )) + ); + + List mdvs7 = itemService.getMetadata( + v1_2, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(5, mdvs7.size()); + + assertTrue(mdvs7.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs7.get(0).getValue()); + assertEquals(0, mdvs7.get(0).getPlace()); + + assertTrue(mdvs7.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (rel)", mdvs7.get(1).getValue()); + assertEquals(1, mdvs7.get(1).getPlace()); + + assertTrue(mdvs7.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs7.get(2).getValue()); + assertEquals(2, mdvs7.get(2).getPlace()); + + assertTrue(mdvs7.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (rel)", mdvs7.get(3).getValue()); + assertEquals(3, mdvs7.get(3).getPlace()); + + assertTrue(mdvs7.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs7.get(4).getValue()); + assertEquals(4, mdvs7.get(4).getPlace()); + + /////////////////////////////////////////////////////////// + // remove relationship - volume 1.2 & issue 3.2 // + // since an issue needs a relationship, delete the issue // + /////////////////////////////////////////////////////////// + + Relationship rel1 = getRelationship(v1_2, isIssueOfJournalVolume, i3_2); + assertNotNull(rel1); + + itemService.delete(context, context.reloadEntity(i3_2)); + + context.commit(); + + //////////////////////////////////// + // after 
remove 1 - cache busting // + //////////////////////////////////// + + v1_2.setMetadataModified(); + v1_2 = context.reloadEntity(v1_2); + + i3_2.setMetadataModified(); + i3_2 = context.reloadEntity(i3_2); + + //////////////////////////////////////// + // after remove 1 - verify volume 3.1 // + //////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_1, isIssueOfJournalVolume, i1_1, RIGHT_ONLY, 0, 0), + isRel(v1_1, isIssueOfJournalVolume, i2_1, RIGHT_ONLY, 1, 0), + isRel(v1_1, isIssueOfJournalVolume, i3_1, RIGHT_ONLY, 2, 0), + isRel(v1_1, isIssueOfJournalVolume, i4_1, RIGHT_ONLY, 3, 0), + isRel(v1_1, isIssueOfJournalVolume, i5_1, RIGHT_ONLY, 4, 0) + )) + ); + + List mdvs9 = itemService.getMetadata( + v1_1, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(5, mdvs9.size()); + + assertTrue(mdvs9.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs9.get(0).getValue()); + assertEquals(0, mdvs9.get(0).getPlace()); + + assertTrue(mdvs9.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (rel)", mdvs9.get(1).getValue()); + assertEquals(1, mdvs9.get(1).getPlace()); + + assertTrue(mdvs9.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs9.get(2).getValue()); + assertEquals(2, mdvs9.get(2).getPlace()); + + assertTrue(mdvs9.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (rel)", mdvs9.get(3).getValue()); + assertEquals(3, mdvs9.get(3).getPlace()); + + assertTrue(mdvs9.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs9.get(4).getValue()); + assertEquals(4, mdvs9.get(4).getPlace()); + + //////////////////////////////////////// + // after remove 1 - verify volume 3.2 // + //////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_2, -1, -1, false, false), + 
containsInAnyOrder(List.of( + isRel(v1_2, isIssueOfJournalVolume, i1_1, BOTH, 0, 0), + isRel(v1_2, isIssueOfJournalVolume, i2_1, BOTH, 1, 0), + isRel(v1_2, isIssueOfJournalVolume, i3_1, LEFT_ONLY, 2, 0), + // NOTE: left place was reduced by one + isRel(v1_2, isIssueOfJournalVolume, i4_1, BOTH, 2, 0), + isRel(v1_2, isIssueOfJournalVolume, i5_1, BOTH, 3, 0) + )) + ); + + List mdvs12 = itemService.getMetadata( + v1_2, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(4, mdvs12.size()); + + assertTrue(mdvs12.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs12.get(0).getValue()); + assertEquals(0, mdvs12.get(0).getPlace()); + + assertTrue(mdvs12.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (rel)", mdvs12.get(1).getValue()); + assertEquals(1, mdvs12.get(1).getPlace()); + + assertTrue(mdvs12.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (rel)", mdvs12.get(2).getValue()); + assertEquals(2, mdvs12.get(2).getPlace()); + + assertTrue(mdvs12.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs12.get(3).getValue()); + assertEquals(3, mdvs12.get(3).getPlace()); + + //////////////////////////////////// + // create new version - issue 3.3 // + //////////////////////////////////// + + // journal issue 3.3 + Item i3_3 = ItemBuilder.createItem(context, collection) + .withTitle("journal issue 3") + .withMetadata("dspace", "entity", "type", journalIssueEntityType.getLabel()) + .withMetadata("publicationissue", "issueNumber", null, "issue nr 3 (rel)") + .build(); + + /////////////////////////////////////////////// + // add relationship - volume 1.2 & issue 3.3 // + /////////////////////////////////////////////// + + RelationshipBuilder.createRelationshipBuilder(context, v1_2, i3_3, isIssueOfJournalVolume, 2, -1) + .build(); + + context.commit(); + + //////////////////////////////////////////// + // after add relationship - cache busting // + 
//////////////////////////////////////////// + + v1_2.setMetadataModified(); + v1_2 = context.reloadEntity(v1_2); + + i3_3.setMetadataModified(); + i3_3 = context.reloadEntity(i3_3); + + //////////////////////////////////////////////// + // after add relationship - verify volume 1.1 // + //////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_1, isIssueOfJournalVolume, i1_1, RIGHT_ONLY, 0, 0), + isRel(v1_1, isIssueOfJournalVolume, i2_1, RIGHT_ONLY, 1, 0), + isRel(v1_1, isIssueOfJournalVolume, i3_1, RIGHT_ONLY, 2, 0), + isRel(v1_1, isIssueOfJournalVolume, i4_1, RIGHT_ONLY, 3, 0), + isRel(v1_1, isIssueOfJournalVolume, i5_1, RIGHT_ONLY, 4, 0) + )) + ); + + List mdvs14 = itemService.getMetadata( + v1_1, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(5, mdvs14.size()); + + assertTrue(mdvs14.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs14.get(0).getValue()); + assertEquals(0, mdvs14.get(0).getPlace()); + + assertTrue(mdvs14.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (rel)", mdvs14.get(1).getValue()); + assertEquals(1, mdvs14.get(1).getPlace()); + + assertTrue(mdvs14.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs14.get(2).getValue()); + assertEquals(2, mdvs14.get(2).getPlace()); + + assertTrue(mdvs14.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (rel)", mdvs14.get(3).getValue()); + assertEquals(3, mdvs14.get(3).getPlace()); + + assertTrue(mdvs14.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs14.get(4).getValue()); + assertEquals(4, mdvs14.get(4).getPlace()); + + //////////////////////////////////////////////// + // after add relationship - verify volume 1.2 // + //////////////////////////////////////////////// + + assertThat( + 
relationshipService.findByItem(context, v1_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_2, isIssueOfJournalVolume, i1_1, BOTH, 0, 0), + isRel(v1_2, isIssueOfJournalVolume, i2_1, BOTH, 1, 0), + isRel(v1_2, isIssueOfJournalVolume, i3_1, LEFT_ONLY, 2, 0), + isRel(v1_2, isIssueOfJournalVolume, i3_3, BOTH, 2, 0), + isRel(v1_2, isIssueOfJournalVolume, i4_1, BOTH, 3, 0), + isRel(v1_2, isIssueOfJournalVolume, i5_1, BOTH, 4, 0) + )) + ); + + assertEquals( + 6, + relationshipService.countByItem(context, v1_2, false, false) + ); + + List mdvs17 = itemService.getMetadata( + v1_2, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(5, mdvs17.size()); + + assertTrue(mdvs17.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs17.get(0).getValue()); + assertEquals(0, mdvs17.get(0).getPlace()); + + assertTrue(mdvs17.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (rel)", mdvs17.get(1).getValue()); + assertEquals(1, mdvs17.get(1).getPlace()); + + assertTrue(mdvs17.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs17.get(2).getValue()); + assertEquals(2, mdvs17.get(2).getPlace()); + + assertTrue(mdvs17.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (rel)", mdvs17.get(3).getValue()); + assertEquals(3, mdvs17.get(3).getPlace()); + + assertTrue(mdvs17.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs17.get(4).getValue()); + assertEquals(4, mdvs17.get(4).getPlace()); + + ///////////////////////////////////////////// + // delete volume first for min cardinality // + ///////////////////////////////////////////// + + itemService.delete(context, context.reloadEntity(v1_1)); + itemService.delete(context, context.reloadEntity(v1_2)); + } + + protected void verifySolrField(Item item, String fieldName, List expectedValues) throws Exception { + QueryResponse result = solrSearchCore.getSolr().query(new 
SolrQuery(String.format( + "search.resourcetype:\"Item\" AND search.resourceid:\"%s\"", item.getID() + ))); + + SolrDocumentList docs = result.getResults(); + Assert.assertEquals(1, docs.size()); + SolrDocument doc = docs.get(0); + + java.util.Collection actualValues = doc.getFieldValues(fieldName); + + if (expectedValues == null) { + assertNull(actualValues); + } else { + assertThat(actualValues, containsInAnyOrder(expectedValues.toArray())); + } + } + + /** + * Setup: + * - two people are linked to one publication + * - create a new version of the publication + * - create a new version of person 1 + * - create a new version of person 2 + * + * Goals: + * - check that the metadata (plain text and from relationships) of the items have the correct value and place, + * as new versions of the items get created and edited + * - verify that changes to newer versions and relationships don't affect older versions and relationships + * - verify that the (versions of) items are properly indexed in the Solr search core + */ + @Test + public void test_virtualMetadataPreserved() throws Exception { + ////////////////////////////////////////////// + // create a publication and link two people // + ////////////////////////////////////////////// + + Item publication1V1 = ItemBuilder.createItem(context, collection) + .withTitle("publication 1V1") + .withMetadata("dspace", "entity", "type", publicationEntityType.getLabel()) + .build(); + + Item person1V1 = ItemBuilder.createItem(context, collection) + .withTitle("person 1V1") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .withPersonIdentifierFirstName("Donald") + .withPersonIdentifierLastName("Smith") + .build(); + + Item person2V1 = ItemBuilder.createItem(context, collection) + .withTitle("person 2V1") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .withPersonIdentifierFirstName("Jane") + .withPersonIdentifierLastName("Doe") + .build(); + + 
RelationshipBuilder.createRelationshipBuilder(context, publication1V1, person1V1, isAuthorOfPublication) + .build(); + + RelationshipBuilder.createRelationshipBuilder(context, publication1V1, person2V1, isAuthorOfPublication) + .withRightwardValue("Doe, J.") + .build(); + + /////////////////////////////////////////////// + // test dc.contributor.author of publication // + /////////////////////////////////////////////// + + List mdvs1 = itemService.getMetadata( + publication1V1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(2, mdvs1.size()); + + assertTrue(mdvs1.get(0) instanceof RelationshipMetadataValue); + assertEquals("Smith, Donald", mdvs1.get(0).getValue()); + assertEquals(0, mdvs1.get(0).getPlace()); + + assertTrue(mdvs1.get(1) instanceof RelationshipMetadataValue); + assertEquals("Doe, J.", mdvs1.get(1).getValue()); + assertEquals(1, mdvs1.get(1).getPlace()); + + verifySolrField(publication1V1, "dc.contributor.author", List.of( + "Smith, Donald", "Doe, J." + )); + + //////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication of publication // + //////////////////////////////////////////////////////// + + List mdvsR1 = itemService.getMetadata( + publication1V1, "relation", "isAuthorOfPublication", null, Item.ANY + ); + assertEquals(2, mdvsR1.size()); + + assertTrue(mdvsR1.get(0) instanceof RelationshipMetadataValue); + assertEquals(person1V1.getID().toString(), mdvsR1.get(0).getValue()); + assertEquals(0, mdvsR1.get(0).getPlace()); + + assertTrue(mdvsR1.get(1) instanceof RelationshipMetadataValue); + assertEquals(person2V1.getID().toString(), mdvsR1.get(1).getValue()); + assertEquals(1, mdvsR1.get(1).getPlace()); + + verifySolrField(publication1V1, "relation.isAuthorOfPublication", List.of( + person1V1.getID().toString(), person2V1.getID().toString() + )); + + /////////////////////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication.latestForDiscovery of publication 
// + /////////////////////////////////////////////////////////////////////////// + + assertThat( + itemService.getMetadata( + publication1V1, "relation", "isAuthorOfPublication", "latestForDiscovery", Item.ANY + ), + containsInAnyOrder( + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person1V1.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person2V1.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ) + ) + ); + + verifySolrField(publication1V1, "relation.isAuthorOfPublication.latestForDiscovery", List.of( + person1V1.getID().toString(), person2V1.getID().toString() + )); + + /////////////////////////////////////////////////////// + // create a new version of publication 1 and archive // + /////////////////////////////////////////////////////// + + Item publication1V2 = versioningService.createNewVersion(context, publication1V1).getItem(); + installItemService.installItem(context, workspaceItemService.findByItem(context, publication1V2)); + context.dispatchEvents(); + + //////////////////////////////////// + // create new version of person 1 // + //////////////////////////////////// + + Item person1V2 = versioningService.createNewVersion(context, person1V1).getItem(); + // update "Smith, Donald" to "Smith, D." 
+ itemService.replaceMetadata( + context, person1V2, "person", "givenName", null, null, "D.", + null, -1, 0 + ); + itemService.update(context, person1V2); + context.dispatchEvents(); + + /////////////////// + // cache busting // + /////////////////// + + publication1V1.setMetadataModified(); + publication1V1 = context.reloadEntity(publication1V1); + + publication1V2.setMetadataModified(); + publication1V2 = context.reloadEntity(publication1V2); + + /////////////////////////////////////////////////// + // test dc.contributor.author of old publication // + /////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, publication1V1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1V1, isAuthorOfPublication, person1V1, RIGHT_ONLY, 0, 0), + isRel(publication1V1, isAuthorOfPublication, person2V1, RIGHT_ONLY, null, "Doe, J.", 1, 0) + )) + ); + + List mdvs2 = itemService.getMetadata( + publication1V1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(2, mdvs2.size()); + + assertTrue(mdvs2.get(0) instanceof RelationshipMetadataValue); + assertEquals("Smith, Donald", mdvs2.get(0).getValue()); + assertEquals(0, mdvs2.get(0).getPlace()); + + assertTrue(mdvs2.get(1) instanceof RelationshipMetadataValue); + assertEquals("Doe, J.", mdvs2.get(1).getValue()); + assertEquals(1, mdvs2.get(1).getPlace()); + + verifySolrField(publication1V1, "dc.contributor.author", List.of( + "Smith, Donald", "Doe, J." 
+ )); + + //////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication of old publication // + //////////////////////////////////////////////////////////// + + List mdvsR2 = itemService.getMetadata( + publication1V1, "relation", "isAuthorOfPublication", null, Item.ANY + ); + assertEquals(2, mdvsR2.size()); + + assertTrue(mdvsR2.get(0) instanceof RelationshipMetadataValue); + assertEquals(person1V1.getID().toString(), mdvsR2.get(0).getValue()); + assertEquals(0, mdvsR2.get(0).getPlace()); + + assertTrue(mdvsR2.get(1) instanceof RelationshipMetadataValue); + assertEquals(person2V1.getID().toString(), mdvsR2.get(1).getValue()); + assertEquals(1, mdvsR2.get(1).getPlace()); + + verifySolrField(publication1V1, "relation.isAuthorOfPublication", List.of( + person1V1.getID().toString(), person2V1.getID().toString() + )); + + /////////////////////////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication.latestForDiscovery of old publication // + /////////////////////////////////////////////////////////////////////////////// + + assertThat( + itemService.getMetadata( + publication1V1, "relation", "isAuthorOfPublication", "latestForDiscovery", Item.ANY + ), + containsInAnyOrder() + ); + + verifySolrField(publication1V1, "relation.isAuthorOfPublication.latestForDiscovery", null); + + /////////////////////////////////////////////////// + // test dc.contributor.author of new publication // + /////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, publication1V2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1V2, isAuthorOfPublication, person1V1, BOTH, 0, 0), + isRel(publication1V2, isAuthorOfPublication, person1V2, LEFT_ONLY, 0, 0), + isRel(publication1V2, isAuthorOfPublication, person2V1, BOTH, null, "Doe, J.", 1, 0) + )) + ); + + List mdvs3 = itemService.getMetadata( + publication1V2, "dc", "contributor", "author", 
Item.ANY + ); + assertEquals(2, mdvs3.size()); + + assertTrue(mdvs3.get(0) instanceof RelationshipMetadataValue); + assertEquals("Smith, Donald", mdvs3.get(0).getValue()); + assertEquals(0, mdvs3.get(0).getPlace()); + + assertTrue(mdvs3.get(1) instanceof RelationshipMetadataValue); + assertEquals("Doe, J.", mdvs3.get(1).getValue()); + assertEquals(1, mdvs3.get(1).getPlace()); + + verifySolrField(publication1V2, "dc.contributor.author", List.of( + "Smith, Donald", "Doe, J." + )); + + //////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication of new publication // + //////////////////////////////////////////////////////////// + + List mdvsR3 = itemService.getMetadata( + publication1V2, "relation", "isAuthorOfPublication", null, Item.ANY + ); + assertEquals(2, mdvsR3.size()); + + assertTrue(mdvsR3.get(0) instanceof RelationshipMetadataValue); + assertEquals(person1V1.getID().toString(), mdvsR3.get(0).getValue()); + assertEquals(0, mdvsR3.get(0).getPlace()); + + assertTrue(mdvsR3.get(1) instanceof RelationshipMetadataValue); + assertEquals(person2V1.getID().toString(), mdvsR3.get(1).getValue()); + assertEquals(1, mdvsR3.get(1).getPlace()); + + verifySolrField(publication1V2, "relation.isAuthorOfPublication", List.of( + person1V1.getID().toString(), person2V1.getID().toString() + )); + + /////////////////////////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication.latestForDiscovery of new publication // + /////////////////////////////////////////////////////////////////////////////// + + assertThat( + itemService.getMetadata( + publication1V2, "relation", "isAuthorOfPublication", "latestForDiscovery", Item.ANY + ), + containsInAnyOrder( + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person1V1.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + 
instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person1V2.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person2V1.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ) + ) + ); + + verifySolrField(publication1V2, "relation.isAuthorOfPublication.latestForDiscovery", List.of( + person1V1.getID().toString(), person1V2.getID().toString(), person2V1.getID().toString() + )); + + ///////////////////////////////////// + // archive new version of person 1 // + ///////////////////////////////////// + + installItemService.installItem(context, workspaceItemService.findByItem(context, person1V2)); + context.dispatchEvents(); + + /////////////////// + // cache busting // + /////////////////// + + publication1V1.setMetadataModified(); + publication1V1 = context.reloadEntity(publication1V1); + + publication1V2.setMetadataModified(); + publication1V2 = context.reloadEntity(publication1V2); + + /////////////////////////////////////////////////// + // test dc.contributor.author of old publication // + /////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, publication1V1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1V1, isAuthorOfPublication, person1V1, RIGHT_ONLY, 0, 0), + isRel(publication1V1, isAuthorOfPublication, person2V1, RIGHT_ONLY, null, "Doe, J.", 1, 0) + )) + ); + + List mdvs4 = itemService.getMetadata( + publication1V1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(2, mdvs4.size()); + + assertTrue(mdvs4.get(0) instanceof RelationshipMetadataValue); + assertEquals("Smith, Donald", mdvs4.get(0).getValue()); + assertEquals(0, mdvs4.get(0).getPlace()); + + assertTrue(mdvs4.get(1) instanceof 
RelationshipMetadataValue); + assertEquals("Doe, J.", mdvs4.get(1).getValue()); + assertEquals(1, mdvs4.get(1).getPlace()); + + verifySolrField(publication1V1, "dc.contributor.author", List.of( + "Smith, Donald", "Doe, J." + )); + + //////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication of publication // + //////////////////////////////////////////////////////// + + List mdvsR4 = itemService.getMetadata( + publication1V1, "relation", "isAuthorOfPublication", null, Item.ANY + ); + assertEquals(2, mdvsR4.size()); + + assertTrue(mdvsR4.get(0) instanceof RelationshipMetadataValue); + assertEquals(person1V1.getID().toString(), mdvsR4.get(0).getValue()); + assertEquals(0, mdvsR4.get(0).getPlace()); + + assertTrue(mdvsR4.get(1) instanceof RelationshipMetadataValue); + assertEquals(person2V1.getID().toString(), mdvsR4.get(1).getValue()); + assertEquals(1, mdvsR4.get(1).getPlace()); + + verifySolrField(publication1V1, "relation.isAuthorOfPublication", List.of( + person1V1.getID().toString(), person2V1.getID().toString() + )); + + /////////////////////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication.latestForDiscovery of publication // + /////////////////////////////////////////////////////////////////////////// + + assertThat( + itemService.getMetadata( + publication1V1, "relation", "isAuthorOfPublication", "latestForDiscovery", Item.ANY + ), + containsInAnyOrder() + ); + + verifySolrField(publication1V1, "relation.isAuthorOfPublication.latestForDiscovery", null); + + /////////////////////////////////////////////////// + // test dc.contributor.author of new publication // + /////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, publication1V2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1V2, isAuthorOfPublication, person1V1, LEFT_ONLY, 0, 0), + isRel(publication1V2, isAuthorOfPublication, person1V2, 
BOTH, 0, 0), + isRel(publication1V2, isAuthorOfPublication, person2V1, BOTH, null, "Doe, J.", 1, 0) + )) + ); + + List mdvs5 = itemService.getMetadata( + publication1V2, "dc", "contributor", "author", Item.ANY + ); + assertEquals(2, mdvs5.size()); + + assertTrue(mdvs5.get(0) instanceof RelationshipMetadataValue); + assertEquals("Smith, D.", mdvs5.get(0).getValue()); + assertEquals(0, mdvs5.get(0).getPlace()); + + assertTrue(mdvs5.get(1) instanceof RelationshipMetadataValue); + assertEquals("Doe, J.", mdvs5.get(1).getValue()); + assertEquals(1, mdvs5.get(1).getPlace()); + + verifySolrField(publication1V2, "dc.contributor.author", List.of( + "Smith, D.", "Doe, J." + )); + + //////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication of new publication // + //////////////////////////////////////////////////////////// + + List mdvsR5 = itemService.getMetadata( + publication1V2, "relation", "isAuthorOfPublication", null, Item.ANY + ); + assertEquals(2, mdvsR5.size()); + + assertTrue(mdvsR5.get(0) instanceof RelationshipMetadataValue); + assertEquals(person1V2.getID().toString(), mdvsR5.get(0).getValue()); + assertEquals(0, mdvsR5.get(0).getPlace()); + + assertTrue(mdvsR5.get(1) instanceof RelationshipMetadataValue); + assertEquals(person2V1.getID().toString(), mdvsR5.get(1).getValue()); + assertEquals(1, mdvsR5.get(1).getPlace()); + + verifySolrField(publication1V2, "relation.isAuthorOfPublication", List.of( + person1V2.getID().toString(), person2V1.getID().toString() + )); + + /////////////////////////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication.latestForDiscovery of new publication // + /////////////////////////////////////////////////////////////////////////////// + + assertThat( + itemService.getMetadata( + publication1V2, "relation", "isAuthorOfPublication", "latestForDiscovery", Item.ANY + ), + containsInAnyOrder( + allOf( + instanceOf(RelationshipMetadataValue.class), + 
hasProperty("value", is(person1V1.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person1V2.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person2V1.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ) + ) + ); + + verifySolrField(publication1V2, "relation.isAuthorOfPublication.latestForDiscovery", List.of( + person1V1.getID().toString(), person1V2.getID().toString(), person2V1.getID().toString() + )); + + //////////////////////////////////// + // create new version of person 2 // + //////////////////////////////////// + + Item person2V2 = versioningService.createNewVersion(context, person2V1).getItem(); + Relationship rel1 = getRelationship(publication1V2, isAuthorOfPublication, person2V2); + assertNotNull(rel1); + rel1.setRightwardValue("Doe, Jane Jr"); + relationshipService.update(context, rel1); + context.dispatchEvents(); + + /////////////////// + // cache busting // + /////////////////// + + publication1V1.setMetadataModified(); + publication1V1 = context.reloadEntity(publication1V1); + + publication1V2.setMetadataModified(); + publication1V2 = context.reloadEntity(publication1V2); + + /////////////////////////////////////////////////// + // test dc.contributor.author of old publication // + /////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, publication1V1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1V1, isAuthorOfPublication, person1V1, RIGHT_ONLY, 0, 0), + isRel(publication1V1, isAuthorOfPublication, person2V1, RIGHT_ONLY, null, "Doe, J.", 1, 0) + )) + ); + + 
List mdvs6 = itemService.getMetadata( + publication1V1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(2, mdvs6.size()); + + assertTrue(mdvs6.get(0) instanceof RelationshipMetadataValue); + assertEquals("Smith, Donald", mdvs6.get(0).getValue()); + assertEquals(0, mdvs6.get(0).getPlace()); + + assertTrue(mdvs6.get(1) instanceof RelationshipMetadataValue); + assertEquals("Doe, J.", mdvs6.get(1).getValue()); + assertEquals(1, mdvs6.get(1).getPlace()); + + verifySolrField(publication1V1, "dc.contributor.author", List.of( + "Smith, Donald", "Doe, J." + )); + + //////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication of old publication // + //////////////////////////////////////////////////////////// + + List mdvsR6 = itemService.getMetadata( + publication1V1, "relation", "isAuthorOfPublication", null, Item.ANY + ); + assertEquals(2, mdvsR6.size()); + + assertTrue(mdvsR6.get(0) instanceof RelationshipMetadataValue); + assertEquals(person1V1.getID().toString(), mdvsR6.get(0).getValue()); + assertEquals(0, mdvsR6.get(0).getPlace()); + + assertTrue(mdvsR6.get(1) instanceof RelationshipMetadataValue); + assertEquals(person2V1.getID().toString(), mdvsR6.get(1).getValue()); + assertEquals(1, mdvsR6.get(1).getPlace()); + + verifySolrField(publication1V1, "relation.isAuthorOfPublication", List.of( + person1V1.getID().toString(), person2V1.getID().toString() + )); + + /////////////////////////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication.latestForDiscovery of old publication // + /////////////////////////////////////////////////////////////////////////////// + + assertThat( + itemService.getMetadata( + publication1V1, "relation", "isAuthorOfPublication", "latestForDiscovery", Item.ANY + ), + containsInAnyOrder() + ); + + verifySolrField(publication1V1, "relation.isAuthorOfPublication.latestForDiscovery", null); + + /////////////////////////////////////////////////// + // 
test dc.contributor.author of new publication // + /////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, publication1V2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1V2, isAuthorOfPublication, person1V1, LEFT_ONLY, 0, 0), + isRel(publication1V2, isAuthorOfPublication, person1V2, BOTH, 0, 0), + isRel(publication1V2, isAuthorOfPublication, person2V1, BOTH, null, "Doe, J.", 1, 0), + isRel(publication1V2, isAuthorOfPublication, person2V2, LEFT_ONLY, null, "Doe, Jane Jr", 1, 0) + )) + ); + + List mdvs7 = itemService.getMetadata( + publication1V2, "dc", "contributor", "author", Item.ANY + ); + assertEquals(2, mdvs7.size()); + + assertTrue(mdvs7.get(0) instanceof RelationshipMetadataValue); + assertEquals("Smith, D.", mdvs7.get(0).getValue()); + assertEquals(0, mdvs7.get(0).getPlace()); + + assertTrue(mdvs7.get(1) instanceof RelationshipMetadataValue); + assertEquals("Doe, J.", mdvs7.get(1).getValue()); + assertEquals(1, mdvs7.get(1).getPlace()); + + verifySolrField(publication1V2, "dc.contributor.author", List.of( + "Smith, D.", "Doe, J." 
+ )); + + //////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication of new publication // + //////////////////////////////////////////////////////////// + + List mdvsR7 = itemService.getMetadata( + publication1V2, "relation", "isAuthorOfPublication", null, Item.ANY + ); + assertEquals(2, mdvsR7.size()); + + assertTrue(mdvsR7.get(0) instanceof RelationshipMetadataValue); + assertEquals(person1V2.getID().toString(), mdvsR7.get(0).getValue()); + assertEquals(0, mdvsR7.get(0).getPlace()); + + assertTrue(mdvsR7.get(1) instanceof RelationshipMetadataValue); + assertEquals(person2V1.getID().toString(), mdvsR7.get(1).getValue()); + assertEquals(1, mdvsR7.get(1).getPlace()); + + verifySolrField(publication1V2, "relation.isAuthorOfPublication", List.of( + person1V2.getID().toString(), person2V1.getID().toString() + )); + + /////////////////////////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication.latestForDiscovery of new publication // + /////////////////////////////////////////////////////////////////////////////// + + assertThat( + itemService.getMetadata( + publication1V2, "relation", "isAuthorOfPublication", "latestForDiscovery", Item.ANY + ), + containsInAnyOrder( + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person1V1.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person1V2.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person2V1.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + 
instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person2V2.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ) + ) + ); + + verifySolrField(publication1V2, "relation.isAuthorOfPublication.latestForDiscovery", List.of( + person1V1.getID().toString(), person1V2.getID().toString(), + person2V1.getID().toString(), person2V2.getID().toString() + )); + + ///////////////////////////////////// + // archive new version of person 2 // + ///////////////////////////////////// + + installItemService.installItem(context, workspaceItemService.findByItem(context, person2V2)); + context.dispatchEvents(); + + /////////////////// + // cache busting // + /////////////////// + + publication1V1.setMetadataModified(); + publication1V1 = context.reloadEntity(publication1V1); + + publication1V2.setMetadataModified(); + publication1V2 = context.reloadEntity(publication1V2); + + /////////////////////////////////////////////////// + // test dc.contributor.author of old publication // + /////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, publication1V1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1V1, isAuthorOfPublication, person1V1, RIGHT_ONLY, 0, 0), + isRel(publication1V1, isAuthorOfPublication, person2V1, RIGHT_ONLY, null, "Doe, J.", 1, 0) + )) + ); + + List mdvs8 = itemService.getMetadata( + publication1V1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(2, mdvs8.size()); + + assertTrue(mdvs8.get(0) instanceof RelationshipMetadataValue); + assertEquals("Smith, Donald", mdvs8.get(0).getValue()); + assertEquals(0, mdvs8.get(0).getPlace()); + + assertTrue(mdvs8.get(1) instanceof RelationshipMetadataValue); + assertEquals("Doe, J.", mdvs8.get(1).getValue()); + assertEquals(1, mdvs8.get(1).getPlace()); + + verifySolrField(publication1V1, "dc.contributor.author", List.of( + "Smith, Donald", "Doe, 
J." + )); + + //////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication of old publication // + //////////////////////////////////////////////////////////// + + List mdvsR8 = itemService.getMetadata( + publication1V1, "relation", "isAuthorOfPublication", null, Item.ANY + ); + assertEquals(2, mdvsR8.size()); + + assertTrue(mdvsR8.get(0) instanceof RelationshipMetadataValue); + assertEquals(person1V1.getID().toString(), mdvsR8.get(0).getValue()); + assertEquals(0, mdvsR8.get(0).getPlace()); + + assertTrue(mdvsR8.get(1) instanceof RelationshipMetadataValue); + assertEquals(person2V1.getID().toString(), mdvsR8.get(1).getValue()); + assertEquals(1, mdvsR8.get(1).getPlace()); + + verifySolrField(publication1V1, "relation.isAuthorOfPublication", List.of( + person1V1.getID().toString(), person2V1.getID().toString() + )); + + /////////////////////////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication.latestForDiscovery of old publication // + /////////////////////////////////////////////////////////////////////////////// + + assertThat( + itemService.getMetadata( + publication1V1, "relation", "isAuthorOfPublication", "latestForDiscovery", Item.ANY + ), + containsInAnyOrder() + ); + + verifySolrField(publication1V1, "relation.isAuthorOfPublication.latestForDiscovery", null); + + /////////////////////////////////////////////////// + // test dc.contributor.author of new publication // + /////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, publication1V2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1V2, isAuthorOfPublication, person1V1, LEFT_ONLY, 0, 0), + isRel(publication1V2, isAuthorOfPublication, person1V2, BOTH, 0, 0), + isRel(publication1V2, isAuthorOfPublication, person2V1, LEFT_ONLY, null, "Doe, J.", 1, 0), + isRel(publication1V2, isAuthorOfPublication, person2V2, BOTH, null, "Doe, Jane Jr", 1, 0) + 
)) + ); + + List mdvs9 = itemService.getMetadata( + publication1V2, "dc", "contributor", "author", Item.ANY + ); + assertEquals(2, mdvs9.size()); + + assertTrue(mdvs9.get(0) instanceof RelationshipMetadataValue); + assertEquals("Smith, D.", mdvs9.get(0).getValue()); + assertEquals(0, mdvs9.get(0).getPlace()); + + assertTrue(mdvs9.get(1) instanceof RelationshipMetadataValue); + assertEquals("Doe, Jane Jr", mdvs9.get(1).getValue()); + assertEquals(1, mdvs9.get(1).getPlace()); + + verifySolrField(publication1V2, "dc.contributor.author", List.of( + "Smith, D.", "Doe, Jane Jr" + )); + + //////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication of new publication // + //////////////////////////////////////////////////////////// + + List mdvsR9 = itemService.getMetadata( + publication1V2, "relation", "isAuthorOfPublication", null, Item.ANY + ); + assertEquals(2, mdvsR9.size()); + + assertTrue(mdvsR9.get(0) instanceof RelationshipMetadataValue); + assertEquals(person1V2.getID().toString(), mdvsR9.get(0).getValue()); + assertEquals(0, mdvsR9.get(0).getPlace()); + + assertTrue(mdvsR9.get(1) instanceof RelationshipMetadataValue); + assertEquals(person2V2.getID().toString(), mdvsR9.get(1).getValue()); + assertEquals(1, mdvsR9.get(1).getPlace()); + + verifySolrField(publication1V2, "relation.isAuthorOfPublication", List.of( + person1V2.getID().toString(), person2V2.getID().toString() + )); + + /////////////////////////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication.latestForDiscovery of new publication // + /////////////////////////////////////////////////////////////////////////////// + + assertThat( + itemService.getMetadata( + publication1V2, "relation", "isAuthorOfPublication", "latestForDiscovery", Item.ANY + ), + containsInAnyOrder( + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person1V1.getID().toString())), + hasProperty("place", is(-1)), + 
hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person1V2.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person2V1.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person2V2.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ) + ) + ); + + verifySolrField(publication1V2, "relation.isAuthorOfPublication.latestForDiscovery", List.of( + person1V1.getID().toString(), person1V2.getID().toString(), + person2V1.getID().toString(), person2V2.getID().toString() + )); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/content/crosswalk/QDCCrosswalkTest.java b/dspace-api/src/test/java/org/dspace/content/crosswalk/QDCCrosswalkTest.java index efed8ad8dc..2eafc03986 100644 --- a/dspace-api/src/test/java/org/dspace/content/crosswalk/QDCCrosswalkTest.java +++ b/dspace-api/src/test/java/org/dspace/content/crosswalk/QDCCrosswalkTest.java @@ -14,7 +14,7 @@ import org.dspace.AbstractDSpaceTest; import org.dspace.core.service.PluginService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Namespace; +import org.jdom2.Namespace; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; diff --git a/dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplTest.java b/dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplTest.java index 2143090fcf..b6f5da6be0 100644 --- a/dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplTest.java 
+++ b/dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplTest.java @@ -138,28 +138,6 @@ public class RelationshipDAOImplTest extends AbstractIntegrationTest { -1, -1, false)); } - /** - * Test findNextLeftPlaceByLeftItem should return 0 given our test left Item itemOne. - * - * @throws Exception - */ - @Test - public void testFindNextLeftPlaceByLeftItem() throws Exception { - assertEquals("TestNextLeftPlaceByLeftItem 0", 1, relationshipService.findNextLeftPlaceByLeftItem(context, - itemOne)); - } - - /** - * Test findNextRightPlaceByRightItem should return 0 given our test right Item itemTwo. - * - * @throws Exception - */ - @Test - public void testFindNextRightPlaceByRightItem() throws Exception { - assertEquals("TestNextRightPlaceByRightItem 0", 1, relationshipService.findNextRightPlaceByRightItem(context, - itemTwo)); - } - /** * Test findByRelationshipType should return our defined relationshipsList given our test RelationshipType * relationshipType diff --git a/dspace-api/src/test/java/org/dspace/content/logic/LogicalFilterTest.java b/dspace-api/src/test/java/org/dspace/content/logic/LogicalFilterTest.java index 7c8268a03b..0e08646220 100644 --- a/dspace-api/src/test/java/org/dspace/content/logic/LogicalFilterTest.java +++ b/dspace-api/src/test/java/org/dspace/content/logic/LogicalFilterTest.java @@ -408,6 +408,7 @@ public class LogicalFilterTest extends AbstractUnitTest { // Create condition to match pattern on dc.title metadata Condition condition = new MetadataValuesMatchCondition(); + condition.setItemService(ContentServiceFactory.getInstance().getItemService()); Map parameters = new HashMap<>(); // Match on the dc.title field parameters.put("field", "dc.title"); @@ -461,6 +462,7 @@ public class LogicalFilterTest extends AbstractUnitTest { // Instantiate new filter for testing this condition DefaultFilter filter = new DefaultFilter(); Condition condition = new InCollectionCondition(); + 
condition.setItemService(ContentServiceFactory.getInstance().getItemService()); Map parameters = new HashMap<>(); // Add collectionOne handle to the collections parameter - ie. we are testing to see if the item is diff --git a/dspace-api/src/test/java/org/dspace/content/packager/ITDSpaceAIP.java b/dspace-api/src/test/java/org/dspace/content/packager/ITDSpaceAIP.java index 33e353f457..a634b98130 100644 --- a/dspace-api/src/test/java/org/dspace/content/packager/ITDSpaceAIP.java +++ b/dspace-api/src/test/java/org/dspace/content/packager/ITDSpaceAIP.java @@ -194,7 +194,7 @@ public class ITDSpaceAIP extends AbstractIntegrationTest { ePersonService.update(context, submitter); context.setCurrentUser(submitter); - //Make our test ePerson an admin so he can perform deletes and restores + //Make our test ePerson an admin so it can perform deletes and restores GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); Group adminGroup = groupService.findByName(context, Group.ADMIN); groupService.addMember(context, adminGroup, submitter); diff --git a/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java b/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java new file mode 100644 index 0000000000..267d66ac2f --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/content/service/ItemServiceTest.java @@ -0,0 +1,485 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.service; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +import java.sql.SQLException; +import java.util.Comparator; +import java.util.List; +import java.util.stream.Collectors; + +import 
org.apache.logging.log4j.Logger; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EntityTypeBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.RelationshipBuilder; +import org.dspace.builder.RelationshipTypeBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.EntityType; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.Relationship; +import org.dspace.content.RelationshipType; +import org.dspace.content.WorkspaceItem; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.versioning.Version; +import org.dspace.versioning.factory.VersionServiceFactory; +import org.dspace.versioning.service.VersioningService; +import org.junit.Before; +import org.junit.Test; + +public class ItemServiceTest extends AbstractIntegrationTestWithDatabase { + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemServiceTest.class); + + protected RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); + protected RelationshipTypeService relationshipTypeService = ContentServiceFactory.getInstance() + .getRelationshipTypeService(); + protected EntityTypeService entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService(); + protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); + protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); + protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + protected InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService(); + protected WorkspaceItemService 
workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); + protected MetadataValueService metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService(); + protected VersioningService versioningService = VersionServiceFactory.getInstance().getVersionService(); + + Community community; + Collection collection1; + + Item item; + + String authorQualifier = "author"; + String contributorElement = "contributor"; + String dcSchema = "dc"; + String subjectElement = "subject"; + String descriptionElement = "description"; + String abstractQualifier = "abstract"; + + /** + * This method will be run before every test as per @Before. It will + * initialize resources required for the tests. + */ + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + try { + context.turnOffAuthorisationSystem(); + + community = CommunityBuilder.createCommunity(context) + .build(); + + collection1 = CollectionBuilder.createCollection(context, community) + .withEntityType("Publication") + .build(); + + WorkspaceItem is = workspaceItemService.create(context, collection1, false); + + item = installItemService.installItem(context, is); + + context.restoreAuthSystemState(); + } catch (AuthorizeException ex) { + log.error("Authorization Error in init", ex); + fail("Authorization Error in init: " + ex.getMessage()); + } catch (SQLException ex) { + log.error("SQL Error in init", ex); + fail("SQL Error in init: " + ex.getMessage()); + } + } + + @Test + public void InsertAndMoveMetadataShiftPlaceTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Here we add the first set of metadata to the item + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, one"); + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, two"); + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, 
three"); + + context.restoreAuthSystemState(); + + // The code below performs the mentioned assertions to ensure the place is correct + List list = itemService + .getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY); + assertThat(list.size(), equalTo(3)); + + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 1, list.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 2, list.get(2)); + + context.turnOffAuthorisationSystem(); + + // This is where we add metadata at place=1 + itemService.addAndShiftRightMetadata( + context, item, dcSchema, contributorElement, authorQualifier, null, "test, four", null, -1, 1 + ); + + // Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned + list = itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY) + .stream() + .sorted(Comparator.comparingInt(MetadataValue::getPlace)) + .collect(Collectors.toList()); + assertThat(list.size(), equalTo(4)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 1, list.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 2, list.get(2)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list.get(3)); + + // And move metadata from place=2 to place=0 + itemService.moveMetadata(context, item, dcSchema, contributorElement, authorQualifier, 2, 0); + + context.restoreAuthSystemState(); + + // Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned + list = itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY) + 
.stream() + .sorted(Comparator.comparingInt(MetadataValue::getPlace)) + .collect(Collectors.toList()); + assertThat(list.size(), equalTo(4)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 0, list.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 1, list.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 2, list.get(2)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list.get(3)); + } + + @Test + public void InsertAndMoveMetadataShiftPlaceTest_complex() throws Exception { + context.turnOffAuthorisationSystem(); + + // Here we add the first set of metadata to the item + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, one"); + + // NOTE: dc.subject should NOT affect dc.contributor.author + itemService.addMetadata(context, item, dcSchema, subjectElement, null, null, "test, sub1"); + // NOTE: dc.subject should NOT affect dc.contributor.author + itemService.addMetadata(context, item, dcSchema, subjectElement, null, null, "test, sub2"); + + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, two"); + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, three"); + + // NOTE: dc.description.abstract should NOT affect dc.contributor.author + itemService.addMetadata(context, item, dcSchema, descriptionElement, abstractQualifier, null, "test, abs1"); + + context.restoreAuthSystemState(); + + // The code below performs the mentioned assertions to ensure the place is correct + List list1 = itemService + .getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY); + assertThat(list1.size(), equalTo(3)); + + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list1.get(0)); + 
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 1, list1.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 2, list1.get(2)); + + List list2 = itemService + .getMetadata(item, dcSchema, subjectElement, null, Item.ANY); + assertThat(list2.size(), equalTo(2)); + + assertMetadataValue(null, subjectElement, dcSchema, "test, sub1", null, 0, list2.get(0)); + assertMetadataValue(null, subjectElement, dcSchema, "test, sub2", null, 1, list2.get(1)); + + List list3 = itemService + .getMetadata(item, dcSchema, descriptionElement, abstractQualifier, Item.ANY); + assertThat(list3.size(), equalTo(1)); + + assertMetadataValue(abstractQualifier, descriptionElement, dcSchema, "test, abs1", null, 0, list3.get(0)); + + context.turnOffAuthorisationSystem(); + + // This is where we add metadata at place=1 + itemService.addAndShiftRightMetadata( + context, item, dcSchema, contributorElement, authorQualifier, null, "test, four", null, -1, 1 + ); + + // Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned + List list4 = itemService + .getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY) + .stream() + .sorted(Comparator.comparingInt(MetadataValue::getPlace)) + .collect(Collectors.toList()); + assertThat(list4.size(), equalTo(4)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list4.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 1, list4.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 2, list4.get(2)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list4.get(3)); + + List list5 = itemService + .getMetadata(item, dcSchema, subjectElement, null, Item.ANY); + assertThat(list5.size(), equalTo(2)); + + assertMetadataValue(null, subjectElement, dcSchema, 
"test, sub1", null, 0, list5.get(0)); + assertMetadataValue(null, subjectElement, dcSchema, "test, sub2", null, 1, list5.get(1)); + + List list6 = itemService + .getMetadata(item, dcSchema, descriptionElement, abstractQualifier, Item.ANY); + assertThat(list6.size(), equalTo(1)); + + assertMetadataValue(abstractQualifier, descriptionElement, dcSchema, "test, abs1", null, 0, list6.get(0)); + + // And move metadata from place=2 to place=0 + itemService.moveMetadata(context, item, dcSchema, contributorElement, authorQualifier, 2, 0); + + context.restoreAuthSystemState(); + + // Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned + List list7 = itemService + .getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY) + .stream() + .sorted(Comparator.comparingInt(MetadataValue::getPlace)) + .collect(Collectors.toList()); + assertThat(list7.size(), equalTo(4)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 0, list7.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 1, list7.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 2, list7.get(2)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list7.get(3)); + + List list8 = itemService + .getMetadata(item, dcSchema, subjectElement, null, Item.ANY); + assertThat(list8.size(), equalTo(2)); + + assertMetadataValue(null, subjectElement, dcSchema, "test, sub1", null, 0, list8.get(0)); + assertMetadataValue(null, subjectElement, dcSchema, "test, sub2", null, 1, list8.get(1)); + + List list9 = itemService + .getMetadata(item, dcSchema, descriptionElement, abstractQualifier, Item.ANY); + assertThat(list9.size(), equalTo(1)); + + assertMetadataValue(abstractQualifier, descriptionElement, dcSchema, "test, abs1", null, 0, list9.get(0)); + } + + @Test + public void 
InsertAndMoveMetadataOnePlaceForwardTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Here we add the first set of metadata to the item + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, one"); + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, two"); + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, three"); + + context.restoreAuthSystemState(); + + // The code below performs the mentioned assertions to ensure the place is correct + List list = itemService + .getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY); + assertThat(list.size(), equalTo(3)); + + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 1, list.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 2, list.get(2)); + + context.turnOffAuthorisationSystem(); + + // This is where we add metadata at place=1 + itemService.addAndShiftRightMetadata( + context, item, dcSchema, contributorElement, authorQualifier, null, "test, four", null, -1, 1 + ); + + // Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned + list = itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY) + .stream() + .sorted(Comparator.comparingInt(MetadataValue::getPlace)) + .collect(Collectors.toList()); + assertThat(list.size(), equalTo(4)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 1, list.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 2, list.get(2)); + 
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list.get(3)); + + // And move metadata from place=1 to place=2 + itemService.moveMetadata(context, item, dcSchema, contributorElement, authorQualifier, 1, 2); + + context.restoreAuthSystemState(); + + // Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned + list = itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY) + .stream() + .sorted(Comparator.comparingInt(MetadataValue::getPlace)) + .collect(Collectors.toList()); + assertThat(list.size(), equalTo(4)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 1, list.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 2, list.get(2)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list.get(3)); + } + + @Test + public void InsertAndMoveMetadataOnePlaceForwardTest_complex() throws Exception { + context.turnOffAuthorisationSystem(); + + // NOTE: dc.description.abstract should NOT affect dc.contributor.author + itemService.addMetadata(context, item, dcSchema, descriptionElement, abstractQualifier, null, "test, abs1"); + + // Here we add the first set of metadata to the item + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, one"); + + // NOTE: dc.subject should NOT affect dc.contributor.author + itemService.addMetadata(context, item, dcSchema, subjectElement, null, null, "test, sub1"); + + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, two"); + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, three"); + + // NOTE: dc.subject should NOT affect dc.contributor.author + 
itemService.addMetadata(context, item, dcSchema, subjectElement, null, null, "test, sub2"); + + context.restoreAuthSystemState(); + + // The code below performs the mentioned assertions to ensure the place is correct + List list1 = itemService + .getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY); + assertThat(list1.size(), equalTo(3)); + + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list1.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 1, list1.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 2, list1.get(2)); + + List list2 = itemService + .getMetadata(item, dcSchema, subjectElement, null, Item.ANY); + assertThat(list2.size(), equalTo(2)); + + assertMetadataValue(null, subjectElement, dcSchema, "test, sub1", null, 0, list2.get(0)); + assertMetadataValue(null, subjectElement, dcSchema, "test, sub2", null, 1, list2.get(1)); + + List list3 = itemService + .getMetadata(item, dcSchema, descriptionElement, abstractQualifier, Item.ANY); + assertThat(list3.size(), equalTo(1)); + + assertMetadataValue(abstractQualifier, descriptionElement, dcSchema, "test, abs1", null, 0, list3.get(0)); + + context.turnOffAuthorisationSystem(); + + // This is where we add metadata at place=1 + itemService.addAndShiftRightMetadata( + context, item, dcSchema, contributorElement, authorQualifier, null, "test, four", null, -1, 1 + ); + + // Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned + List list4 = itemService + .getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY) + .stream() + .sorted(Comparator.comparingInt(MetadataValue::getPlace)) + .collect(Collectors.toList()); + assertThat(list4.size(), equalTo(4)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list4.get(0)); + 
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 1, list4.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 2, list4.get(2)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list4.get(3)); + + List list5 = itemService + .getMetadata(item, dcSchema, subjectElement, null, Item.ANY); + assertThat(list5.size(), equalTo(2)); + + assertMetadataValue(null, subjectElement, dcSchema, "test, sub1", null, 0, list5.get(0)); + assertMetadataValue(null, subjectElement, dcSchema, "test, sub2", null, 1, list5.get(1)); + + List list6 = itemService + .getMetadata(item, dcSchema, descriptionElement, abstractQualifier, Item.ANY); + assertThat(list6.size(), equalTo(1)); + + assertMetadataValue(abstractQualifier, descriptionElement, dcSchema, "test, abs1", null, 0, list6.get(0)); + + // And move metadata from place=1 to place=2 + itemService.moveMetadata(context, item, dcSchema, contributorElement, authorQualifier, 1, 2); + + context.restoreAuthSystemState(); + + // Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned + List list7 = itemService + .getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY) + .stream() + .sorted(Comparator.comparingInt(MetadataValue::getPlace)) + .collect(Collectors.toList()); + assertThat(list7.size(), equalTo(4)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list7.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 1, list7.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 2, list7.get(2)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list7.get(3)); + + List list8 = itemService + .getMetadata(item, dcSchema, subjectElement, null, Item.ANY); + assertThat(list8.size(), 
equalTo(2)); + + assertMetadataValue(null, subjectElement, dcSchema, "test, sub1", null, 0, list8.get(0)); + assertMetadataValue(null, subjectElement, dcSchema, "test, sub2", null, 1, list8.get(1)); + + List list9 = itemService + .getMetadata(item, dcSchema, descriptionElement, abstractQualifier, Item.ANY); + assertThat(list9.size(), equalTo(1)); + + assertMetadataValue(abstractQualifier, descriptionElement, dcSchema, "test, abs1", null, 0, list9.get(0)); + } + + @Test + public void testDeleteItemWithMultipleVersions() throws Exception { + context.turnOffAuthorisationSystem(); + + EntityType publicationEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication") + .build(); + + EntityType personEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Person") + .build(); + + RelationshipType isAuthorOfPublication = RelationshipTypeBuilder.createRelationshipTypeBuilder( + context, publicationEntityType, personEntityType, "isAuthorOfPublication", "isPublicationOfAuthor", + null, null, null, null + ) + .withCopyToLeft(false) + .withCopyToRight(false) + .build(); + + Collection collection2 = CollectionBuilder.createCollection(context, community) + .withEntityType("Person") + .build(); + + Item publication1 = ItemBuilder.createItem(context, collection1) + .withTitle("publication 1") + // NOTE: entity type comes from collection + .build(); + + Item person1 = ItemBuilder.createItem(context, collection2) + .withTitle("person 2") + // NOTE: entity type comes from collection + .build(); + + RelationshipBuilder.createRelationshipBuilder(context, publication1, person1, isAuthorOfPublication); + + // create a new version, which results in a non-latest relationship attached to person 1. 
+ Version newVersion = versioningService.createNewVersion(context, publication1); + Item newPublication1 = newVersion.getItem(); + WorkspaceItem newPublication1WSI = workspaceItemService.findByItem(context, newPublication1); + installItemService.installItem(context, newPublication1WSI); + context.dispatchEvents(); + + // verify person1 has a non-latest relationship, which should also be removed + List relationships1 = relationshipService.findByItem(context, person1, -1, -1, false, true); + assertEquals(1, relationships1.size()); + List relationships2 = relationshipService.findByItem(context, person1, -1, -1, false, false); + assertEquals(2, relationships2.size()); + + itemService.delete(context, person1); + + context.restoreAuthSystemState(); + } + + private void assertMetadataValue(String authorQualifier, String contributorElement, String dcSchema, String value, + String authority, int place, MetadataValue metadataValue) { + assertThat(metadataValue.getValue(), equalTo(value)); + assertThat(metadataValue.getMetadataField().getMetadataSchema().getName(), equalTo(dcSchema)); + assertThat(metadataValue.getMetadataField().getElement(), equalTo(contributorElement)); + assertThat(metadataValue.getMetadataField().getQualifier(), equalTo(authorQualifier)); + assertThat(metadataValue.getAuthority(), equalTo(authority)); + assertThat(metadataValue.getPlace(), equalTo(place)); + } +} diff --git a/dspace-api/src/test/java/org/dspace/core/ContextTest.java b/dspace-api/src/test/java/org/dspace/core/ContextTest.java index 811582c569..c6cd849d21 100644 --- a/dspace-api/src/test/java/org/dspace/core/ContextTest.java +++ b/dspace-api/src/test/java/org/dspace/core/ContextTest.java @@ -8,6 +8,7 @@ package org.dspace.core; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.hasItems; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; @@ 
-511,9 +512,8 @@ public class ContextTest extends AbstractUnitTest { // Now get our special groups List specialGroups = instance.getSpecialGroups(); - assertThat("testGetSpecialGroup 0", specialGroups.size(), equalTo(2)); - assertThat("testGetSpecialGroup 1", specialGroups.get(0), equalTo(group)); - assertThat("testGetSpecialGroup 1", specialGroups.get(1), equalTo(adminGroup)); + assertThat("testGetSpecialGroup size", specialGroups.size(), equalTo(2)); + assertThat("testGetSpecialGroup content", specialGroups, hasItems(group, adminGroup)); // Cleanup our context & group groupService.delete(instance, group); diff --git a/dspace-api/src/test/java/org/dspace/ctask/general/CreateMissingIdentifiersIT.java b/dspace-api/src/test/java/org/dspace/ctask/general/CreateMissingIdentifiersIT.java new file mode 100644 index 0000000000..2a07799dee --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/ctask/general/CreateMissingIdentifiersIT.java @@ -0,0 +1,89 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.ctask.general; + +import static org.junit.Assert.assertEquals; + +import java.io.IOException; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.core.factory.CoreServiceFactory; +import org.dspace.curate.Curator; +import org.dspace.identifier.VersionedHandleIdentifierProviderWithCanonicalHandles; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.After; +import org.junit.Test; + +/** + * Rudimentary test of the curation task. 
+ * + * @author mwood + */ +public class CreateMissingIdentifiersIT + extends AbstractIntegrationTestWithDatabase { + private static final String P_TASK_DEF + = "plugin.named.org.dspace.curate.CurationTask"; + private static final String TASK_NAME = "test"; + + @Test + public void testPerform() + throws IOException { + // Must remove any cached named plugins before creating a new one + CoreServiceFactory.getInstance().getPluginService().clearNamedPluginClasses(); + ConfigurationService configurationService = kernelImpl.getConfigurationService(); + // Define a new task dynamically + configurationService.setProperty(P_TASK_DEF, + CreateMissingIdentifiers.class.getCanonicalName() + " = " + TASK_NAME); + + Curator curator = new Curator(); + curator.addTask(TASK_NAME); + + context.setCurrentUser(admin); + parentCommunity = CommunityBuilder.createCommunity(context) + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .build(); + Item item = ItemBuilder.createItem(context, collection) + .build(); + + /* + * Curate with regular test configuration -- should succeed. + */ + curator.curate(context, item); + int status = curator.getStatus(TASK_NAME); + assertEquals("Curation should succeed", Curator.CURATE_SUCCESS, status); + + /* + * Now install an incompatible provider to make the task fail. 
+ */ + DSpaceServicesFactory.getInstance() + .getServiceManager() + .registerServiceClass( + VersionedHandleIdentifierProviderWithCanonicalHandles.class.getCanonicalName(), + VersionedHandleIdentifierProviderWithCanonicalHandles.class); + + curator.curate(context, item); + System.out.format("With incompatible provider, result is '%s'.\n", + curator.getResult(TASK_NAME)); + assertEquals("Curation should fail", Curator.CURATE_ERROR, + curator.getStatus(TASK_NAME)); + } + + @Override + @After + public void destroy() throws Exception { + super.destroy(); + DSpaceServicesFactory.getInstance().getServiceManager().getApplicationContext().refresh(); + } +} diff --git a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java index 9504d01393..0d1cc13106 100644 --- a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java +++ b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java @@ -8,13 +8,18 @@ package org.dspace.discovery; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import java.io.IOException; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Iterator; import java.util.List; import javax.servlet.http.HttpServletRequest; import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.launcher.ScriptLauncher; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; import org.dspace.authorize.AuthorizeException; import org.dspace.builder.ClaimedTaskBuilder; import org.dspace.builder.CollectionBuilder; @@ -55,6 +60,7 @@ import org.dspace.xmlworkflow.storedcomponents.ClaimedTask; import org.dspace.xmlworkflow.storedcomponents.PoolTask; import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; import org.dspace.xmlworkflow.storedcomponents.service.ClaimedTaskService; +import org.junit.Before; import org.junit.Test; import org.springframework.mock.web.MockHttpServletRequest; @@ -64,7 
+70,7 @@ import org.springframework.mock.web.MockHttpServletRequest; public class DiscoveryIT extends AbstractIntegrationTestWithDatabase { protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); - protected SearchService searchService = SearchUtils.getSearchService(); + protected SearchService searchService; XmlWorkflowService workflowService = XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService(); @@ -86,6 +92,14 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase { MetadataAuthorityService metadataAuthorityService = ContentAuthorityServiceFactory.getInstance() .getMetadataAuthorityService(); + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + configurationService.setProperty("solr-database-resync.time-until-reindex", 1); + searchService = SearchUtils.getSearchService(); + } + @Test public void solrRecordsAfterDepositOrDeletionOfWorkspaceItemTest() throws Exception { context.turnOffAuthorisationSystem(); @@ -371,7 +385,8 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase { collectionService.delete(context, col1); context.restoreAuthSystemState(); assertSearchQuery(IndexableCollection.TYPE, 2); - assertSearchQuery(IndexableItem.TYPE, 2); + // Deleted item contained within totalFound due to predb status (SolrDatabaseResyncCli takes care of this) + assertSearchQuery(IndexableItem.TYPE, 2, 3, 0, -1); } @Test @@ -453,6 +468,10 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase { assertSearchQuery(IndexableCollection.TYPE, 2, 2, 0, -1); // check Item type with start=0 and limit=2, we expect: indexableObjects=2, totalFound=6 assertSearchQuery(IndexableItem.TYPE, 2, 6, 0, 2); + + // Run SolrDatabaseResyncCli, updating items with "preDB" status and removing stale items + performSolrDatabaseResyncScript(); + // check Item type with start=2 and limit=4, we expect: indexableObjects=1, totalFound=3 
assertSearchQuery(IndexableItem.TYPE, 1, 3, 2, 4); // check Item type with start=0 and limit=default, we expect: indexableObjects=3, totalFound=3 @@ -639,16 +658,85 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase { // check Item type with start=0 and limit=default, // we expect: indexableObjects=3, totalFound=6 (3 stale objects here) assertSearchQuery(IndexableItem.TYPE, 3, 6, 0, -1); - // as the previous query hit the stale objects running a new query should lead to a clean situation + + // Run SolrDatabaseResyncCli, updating items with "preDB" status and removing stale items + performSolrDatabaseResyncScript(); + + // as SolrDatabaseResyncCli removed the stale objects, running a new query should lead to a clean situation assertSearchQuery(IndexableItem.TYPE, 3, 3, 0, -1); } + @Test + public void iteratorSearchServiceTest() throws SearchServiceException { + String subject1 = "subject1"; + String subject2 = "subject2"; + int numberItemsSubject1 = 30; + int numberItemsSubject2 = 2; + Item[] itemsSubject1 = new Item[numberItemsSubject1]; + Item[] itemsSubject2 = new Item[numberItemsSubject2]; + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + for (int i = 0; i < numberItemsSubject1; i++) { + itemsSubject1[i] = ItemBuilder.createItem(context, collection) + .withTitle("item subject 1 number" + i) + .withSubject(subject1) + .build(); + } + + for (int i = 0; i < numberItemsSubject2; i++) { + itemsSubject2[i] = ItemBuilder.createItem(context, collection) + .withTitle("item subject 2 number " + i) + .withSubject(subject2) + .build(); + } + + Collection collection2 = CollectionBuilder.createCollection(context, community).build(); + ItemBuilder.createItem(context, collection2) + .withTitle("item collection2") + .withSubject(subject1) + .build(); + context.restoreAuthSystemState(); + + + 
DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.addFilterQueries("subject:" + subject1); + + Iterator itemIterator = + searchService.iteratorSearch(context, new IndexableCollection(collection), discoverQuery); + int counter = 0; + List foundItems = new ArrayList<>(); + while (itemIterator.hasNext()) { + foundItems.add(itemIterator.next()); + counter++; + } + for (Item item : itemsSubject1) { + assertTrue(foundItems.contains(item)); + } + assertEquals(numberItemsSubject1, counter); + + discoverQuery = new DiscoverQuery(); + discoverQuery.addFilterQueries("subject:" + subject2); + + itemIterator = searchService.iteratorSearch(context, null, discoverQuery); + counter = 0; + foundItems = new ArrayList<>(); + while (itemIterator.hasNext()) { + foundItems.add(itemIterator.next()); + counter++; + } + assertEquals(numberItemsSubject2, counter); + for (Item item : itemsSubject2) { + assertTrue(foundItems.contains(item)); + } + } + private void assertSearchQuery(String resourceType, int size) throws SearchServiceException { assertSearchQuery(resourceType, size, size, 0, -1); } private void assertSearchQuery(String resourceType, int size, int totalFound, int start, int limit) - throws SearchServiceException { + throws SearchServiceException { DiscoverQuery discoverQuery = new DiscoverQuery(); discoverQuery.setQuery("*:*"); discoverQuery.setStart(start); @@ -739,6 +827,13 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase { context.setCurrentUser(previousUser); } + public void performSolrDatabaseResyncScript() throws Exception { + String[] args = new String[] {"solr-database-resync"}; + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher + .handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + } + private void abort(XmlWorkflowItem workflowItem) throws SQLException, AuthorizeException, IOException, SearchServiceException { final EPerson 
previousUser = context.getCurrentUser(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/DiscoverQueryBuilderTest.java b/dspace-api/src/test/java/org/dspace/discovery/utils/DiscoverQueryBuilderTest.java similarity index 79% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/DiscoverQueryBuilderTest.java rename to dspace-api/src/test/java/org/dspace/discovery/utils/DiscoverQueryBuilderTest.java index 9a8f07e76a..07652e8c0c 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/DiscoverQueryBuilderTest.java +++ b/dspace-api/src/test/java/org/dspace/discovery/utils/DiscoverQueryBuilderTest.java @@ -5,7 +5,7 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.utils; +package org.dspace.discovery.utils; import static java.util.Collections.emptyList; import static org.dspace.discovery.configuration.DiscoveryConfigurationParameters.SORT.COUNT; @@ -16,10 +16,10 @@ import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.emptyOrNullString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.isEmptyOrNullString; import static org.junit.Assert.assertThat; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyInt; @@ -35,9 +35,6 @@ import java.util.LinkedList; import java.util.List; import java.util.function.Function; -import org.dspace.app.rest.exception.DSpaceBadRequestException; -import org.dspace.app.rest.exception.InvalidSearchRequestException; -import org.dspace.app.rest.parameter.SearchFilter; import org.dspace.core.Context; import org.dspace.discovery.DiscoverFacetField; import org.dspace.discovery.DiscoverFilterQuery; @@ -45,6 +42,7 @@ import 
org.dspace.discovery.DiscoverHitHighlightingField; import org.dspace.discovery.DiscoverQuery; import org.dspace.discovery.FacetYearRange; import org.dspace.discovery.IndexableObject; +import org.dspace.discovery.SearchServiceException; import org.dspace.discovery.SolrServiceImpl; import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfigurationParameters; @@ -56,6 +54,7 @@ import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration; import org.dspace.discovery.configuration.HierarchicalSidebarFacetConfiguration; import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.discovery.indexobject.factory.IndexFactory; +import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter; import org.dspace.services.ConfigurationService; import org.hamcrest.FeatureMatcher; import org.hamcrest.Matcher; @@ -65,8 +64,7 @@ import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; -import org.springframework.data.domain.PageRequest; -import org.springframework.data.domain.Sort; + /** * Unit tests for {@link DiscoverQueryBuilder} @@ -94,8 +92,14 @@ public class DiscoverQueryBuilderTest { private DiscoveryConfiguration discoveryConfiguration; private String query; - private SearchFilter searchFilter; - private PageRequest page; + + private int pageSize = 10; + private long offset = 10; + private String sortProperty = "dc.title"; + private String sortDirection = "ASC"; + + private QueryBuilderSearchFilter searchFilter; + @Before public void setUp() throws Exception { @@ -106,33 +110,35 @@ public class DiscoverQueryBuilderTest { when(configurationService.getIntProperty(eq("rest.search.max.results"), anyInt())).thenReturn(100); when(searchService.toSortFieldIndex(any(String.class), any(String.class))) - .then(invocation -> invocation.getArguments()[0] + "_sort"); + .then(invocation -> invocation.getArguments()[0] + 
"_sort"); when(searchService - .getFacetYearRange(eq(context), nullable(IndexableObject.class), any(DiscoverySearchFilterFacet.class), - any(), any(DiscoverQuery.class))) - .then(invocation -> new FacetYearRange((DiscoverySearchFilterFacet) invocation.getArguments()[2])); + .getFacetYearRange(eq(context), nullable(IndexableObject.class), + any(DiscoverySearchFilterFacet.class), + any(), any(DiscoverQuery.class))) + .then(invocation -> new FacetYearRange((DiscoverySearchFilterFacet) invocation.getArguments()[2])); when(searchService.toFilterQuery(any(Context.class), any(String.class), any(String.class), any(String.class), - any(DiscoveryConfiguration.class))) - .then(invocation -> new DiscoverFilterQuery((String) invocation.getArguments()[1], - invocation.getArguments()[1] + ":\"" + invocation.getArguments()[3] + "\"", - (String) invocation.getArguments()[3])); + any(DiscoveryConfiguration.class))) + .then(invocation -> new DiscoverFilterQuery((String) invocation.getArguments()[1], + invocation.getArguments()[1] + ":\"" + invocation + .getArguments()[3] + "\"", + (String) invocation.getArguments()[3])); discoveryConfiguration = new DiscoveryConfiguration(); discoveryConfiguration.setDefaultFilterQueries(Arrays.asList("archived:true")); DiscoveryHitHighlightingConfiguration discoveryHitHighlightingConfiguration = - new DiscoveryHitHighlightingConfiguration(); + new DiscoveryHitHighlightingConfiguration(); List discoveryHitHighlightFieldConfigurations = new LinkedList<>(); DiscoveryHitHighlightFieldConfiguration discoveryHitHighlightFieldConfiguration = - new DiscoveryHitHighlightFieldConfiguration(); + new DiscoveryHitHighlightFieldConfiguration(); discoveryHitHighlightFieldConfiguration.setField("dc.title"); DiscoveryHitHighlightFieldConfiguration discoveryHitHighlightFieldConfiguration1 = - new DiscoveryHitHighlightFieldConfiguration(); + new DiscoveryHitHighlightFieldConfiguration(); discoveryHitHighlightFieldConfiguration1.setField("fulltext"); 
discoveryHitHighlightFieldConfigurations.add(discoveryHitHighlightFieldConfiguration1); @@ -177,9 +183,8 @@ public class DiscoverQueryBuilderTest { discoveryConfiguration.setSidebarFacets(Arrays.asList(subjectFacet, dateFacet, hierarchyFacet)); discoveryConfiguration.setSearchFilters(Arrays.asList(subjectFacet, dateFacet, hierarchyFacet)); + searchFilter = new QueryBuilderSearchFilter("subject", "equals", "Java"); query = "my test case"; - searchFilter = new SearchFilter("subject", "equals", "Java"); - page = PageRequest.of(1, 10, Sort.Direction.ASC, "dc.title"); queryBuilder.afterPropertiesSet(); } @@ -188,7 +193,8 @@ public class DiscoverQueryBuilderTest { public void testBuildQuery() throws Exception { DiscoverQuery discoverQuery = queryBuilder - .buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "item", page); + .buildQuery(context, scope, discoveryConfiguration, query, Collections.singletonList(searchFilter), + "item", pageSize, offset, sortProperty, sortDirection); assertThat(discoverQuery.getFilterQueries(), containsInAnyOrder("archived:true", "subject:\"Java\"")); assertThat(discoverQuery.getQuery(), is(query)); @@ -214,10 +220,11 @@ public class DiscoverQueryBuilderTest { @Test public void testBuildQueryDefaults() throws Exception { DiscoverQuery discoverQuery = - queryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), null); + queryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), null, null, + null, null); assertThat(discoverQuery.getFilterQueries(), containsInAnyOrder("archived:true")); - assertThat(discoverQuery.getQuery(), is(emptyOrNullString())); + assertThat(discoverQuery.getQuery(), isEmptyOrNullString()); assertThat(discoverQuery.getDSpaceObjectFilters(), is(empty())); //Note this should actually be "dc.date.accessioned_dt" but remember that our searchService is just a stupid // mock @@ -241,13 +248,12 @@ public class DiscoverQueryBuilderTest { 
@Test public void testSortByScore() throws Exception { - page = PageRequest.of(2, 10, Sort.Direction.ASC, "SCORE"); - DiscoverQuery discoverQuery = - queryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), page); + queryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), 10, 20L, + "SCORE", "ASC"); assertThat(discoverQuery.getFilterQueries(), containsInAnyOrder("archived:true")); - assertThat(discoverQuery.getQuery(), is(emptyOrNullString())); + assertThat(discoverQuery.getQuery(), isEmptyOrNullString()); assertThat(discoverQuery.getDSpaceObjectFilters(), is(empty())); //Note this should actually be "dc.date.accessioned_dt" but remember that our searchService is just a stupid // mock @@ -269,48 +275,50 @@ public class DiscoverQueryBuilderTest { )); } - @Test(expected = DSpaceBadRequestException.class) + @Test(expected = IllegalArgumentException.class) public void testInvalidDSOType() throws Exception { queryBuilder - .buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "TEST", page); + .buildQuery(context, scope, discoveryConfiguration, query, Collections.singletonList(searchFilter), + "TEST", pageSize, offset, sortProperty, sortDirection); } - @Test(expected = InvalidSearchRequestException.class) + @Test(expected = SearchServiceException.class) public void testInvalidSortField() throws Exception { - page = PageRequest.of(2, 10, Sort.Direction.ASC, "test"); queryBuilder - .buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "ITEM", page); + .buildQuery(context, scope, discoveryConfiguration, query, Collections.singletonList(searchFilter), + "ITEM", pageSize, 20L, "test", sortDirection); } - @Test(expected = DSpaceBadRequestException.class) + @Test(expected = IllegalArgumentException.class) public void testInvalidSearchFilter1() throws Exception { - searchFilter = new SearchFilter("test", "equals", "Smith, Donald"); + searchFilter = 
new QueryBuilderSearchFilter("test", "equals", "Smith, Donald"); queryBuilder - .buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "ITEM", page); + .buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "ITEM", + pageSize, offset, sortProperty, sortDirection); } - @Test(expected = DSpaceBadRequestException.class) + @Test(expected = IllegalArgumentException.class) public void testInvalidSearchFilter2() throws Exception { when(searchService.toFilterQuery(any(Context.class), any(String.class), any(String.class), any(String.class), - any(DiscoveryConfiguration.class))) - .thenThrow(SQLException.class); + any(DiscoveryConfiguration.class))) + .thenThrow(SQLException.class); queryBuilder - .buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "ITEM", page); + .buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "ITEM", + pageSize, offset, sortProperty, sortDirection); } @Test public void testBuildFacetQuery() throws Exception { - DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scope, discoveryConfiguration, - "prefix", query, - Arrays.asList(searchFilter), "item", page, - "subject"); + DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scope, discoveryConfiguration, "prefix", + query, Collections.singletonList(searchFilter), + "item", pageSize, offset, "subject"); assertThat(discoverQuery.getFilterQueries(), containsInAnyOrder("archived:true", "subject:\"Java\"")); assertThat(discoverQuery.getQuery(), is(query)); assertThat(discoverQuery.getDSpaceObjectFilters(), contains(IndexableItem.TYPE)); - assertThat(discoverQuery.getSortField(), is(emptyOrNullString())); + assertThat(discoverQuery.getSortField(), isEmptyOrNullString()); assertThat(discoverQuery.getMaxResults(), is(0)); assertThat(discoverQuery.getStart(), is(0)); assertThat(discoverQuery.getFacetMinCount(), is(1)); @@ -321,10 +329,10 @@ 
public class DiscoverQueryBuilderTest { )); } - @Test(expected = DSpaceBadRequestException.class) + @Test(expected = IllegalArgumentException.class) public void testInvalidSearchFacet() throws Exception { queryBuilder.buildFacetQuery(context, scope, discoveryConfiguration, null, query, - Arrays.asList(searchFilter), "item", page, "test"); + Collections.singletonList(searchFilter), "item", pageSize, offset, "test"); } public Matcher discoverFacetFieldMatcher(DiscoverFacetField expected) { diff --git a/dspace-api/src/test/java/org/dspace/external/provider/impl/MockPubmedImportMetadataSourceServiceImpl.java b/dspace-api/src/test/java/org/dspace/external/provider/impl/MockPubmedImportMetadataSourceServiceImpl.java deleted file mode 100644 index 1a88c1e55b..0000000000 --- a/dspace-api/src/test/java/org/dspace/external/provider/impl/MockPubmedImportMetadataSourceServiceImpl.java +++ /dev/null @@ -1,87 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.external.provider.impl; - -import static org.mockito.Mockito.when; - -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.Reader; -import java.io.UncheckedIOException; -import java.nio.charset.StandardCharsets; -import javax.ws.rs.client.Invocation; -import javax.ws.rs.client.WebTarget; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; - -import org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl; -import org.mockito.ArgumentCaptor; -import org.mockito.ArgumentMatchers; -import org.mockito.Mockito; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; -import org.springframework.util.FileCopyUtils; - -/** - * we override the init method to mock the rest call to pubmed the following - * mock 
definitions will allow to answer to efetch or esearch requests using the - * test resource files (pubmed-esearch.fcgi.xml or pubmed-efetch.fcgi.xml) - * - * @author Andrea Bollini (andrea.bollini at 4science.it) - * - */ -public class MockPubmedImportMetadataSourceServiceImpl extends PubmedImportMetadataSourceServiceImpl { - - @Override - public void init() throws Exception { - pubmedWebTarget = Mockito.mock(WebTarget.class); - ArgumentCaptor valueCapture = ArgumentCaptor.forClass(String.class); - when(pubmedWebTarget.queryParam(ArgumentMatchers.any(), ArgumentMatchers.any())) - .thenAnswer(new Answer() { - @Override - public WebTarget answer(InvocationOnMock invocation) throws Throwable { - return pubmedWebTarget; - } - }); - when(pubmedWebTarget.path(valueCapture.capture())).thenAnswer(new Answer() { - @Override - public WebTarget answer(InvocationOnMock invocation) throws Throwable { - return pubmedWebTarget; - } - }); - when(pubmedWebTarget.request(ArgumentMatchers.any(MediaType.class))) - .thenAnswer(new Answer() { - @Override - public Invocation.Builder answer(InvocationOnMock invocation) throws Throwable { - Invocation.Builder builder = Mockito.mock(Invocation.Builder.class); - when(builder.get()).thenAnswer(new Answer() { - @Override - public Response answer(InvocationOnMock invocation) throws Throwable { - Response response = Mockito.mock(Response.class); - when(response.readEntity(ArgumentMatchers.eq(String.class))).then(new Answer() { - @Override - public String answer(InvocationOnMock invocation) throws Throwable { - String resourceName = "pubmed-" + valueCapture.getValue() + ".xml"; - InputStream resource = getClass().getResourceAsStream(resourceName); - try (Reader reader = new InputStreamReader(resource, StandardCharsets.UTF_8)) { - return FileCopyUtils.copyToString(reader); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - }); - return response; - } - }); - return builder; - }; - }); - } - -} diff --git 
a/dspace-api/src/test/java/org/dspace/external/provider/impl/OrcidPublicationDataProviderIT.java b/dspace-api/src/test/java/org/dspace/external/provider/impl/OrcidPublicationDataProviderIT.java new file mode 100644 index 0000000000..dae14115b8 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/external/provider/impl/OrcidPublicationDataProviderIT.java @@ -0,0 +1,434 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.external.provider.impl; + +import static java.util.Optional.of; +import static org.dspace.app.matcher.LambdaMatcher.has; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.net.URL; +import java.util.List; +import java.util.Optional; +import java.util.function.Predicate; +import javax.xml.bind.JAXBContext; +import javax.xml.bind.Unmarshaller; + +import org.apache.commons.codec.binary.StringUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.OrcidTokenBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.dto.MetadataValueDTO; +import 
org.dspace.external.model.ExternalDataObject; +import org.dspace.orcid.client.OrcidClient; +import org.dspace.orcid.client.OrcidConfiguration; +import org.dspace.orcid.model.OrcidTokenResponseDTO; +import org.dspace.utils.DSpace; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.orcid.jaxb.model.v3.release.record.Work; +import org.orcid.jaxb.model.v3.release.record.WorkBulk; +import org.orcid.jaxb.model.v3.release.record.summary.Works; + +/** + * Integration tests for {@link OrcidPublicationDataProvider}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidPublicationDataProviderIT extends AbstractIntegrationTestWithDatabase { + + private static final String BASE_XML_DIR_PATH = "org/dspace/app/orcid-works/"; + + private static final String ACCESS_TOKEN = "32c83ccb-c6d5-4981-b6ea-6a34a36de8ab"; + + private static final String ORCID = "0000-1111-2222-3333"; + + private OrcidPublicationDataProvider dataProvider; + + private OrcidConfiguration orcidConfiguration; + + private OrcidClient orcidClient; + + private OrcidClient orcidClientMock; + + private String originalClientId; + + private Collection persons; + + @Before + public void setup() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + persons = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("Person") + .withName("Profiles") + .build(); + + context.restoreAuthSystemState(); + + dataProvider = new DSpace().getServiceManager() + .getServiceByName("orcidPublicationDataProvider", OrcidPublicationDataProvider.class); + + orcidConfiguration = new DSpace().getServiceManager() + .getServiceByName("org.dspace.orcid.client.OrcidConfiguration", OrcidConfiguration.class); + + orcidClientMock = mock(OrcidClient.class); + orcidClient = dataProvider.getOrcidClient(); + + 
dataProvider.setReadPublicAccessToken(null); + dataProvider.setOrcidClient(orcidClientMock); + + originalClientId = orcidConfiguration.getClientId(); + orcidConfiguration.setClientId("DSPACE-CLIENT-ID"); + orcidConfiguration.setClientSecret("DSPACE-CLIENT-SECRET"); + + when(orcidClientMock.getReadPublicAccessToken()).thenReturn(buildTokenResponse(ACCESS_TOKEN)); + + when(orcidClientMock.getWorks(any(), eq(ORCID))).thenReturn(unmarshall("works.xml", Works.class)); + when(orcidClientMock.getWorks(eq(ORCID))).thenReturn(unmarshall("works.xml", Works.class)); + + when(orcidClientMock.getObject(any(), eq(ORCID), any(), eq(Work.class))) + .then((invocation) -> of(unmarshall("work-" + invocation.getArgument(2) + ".xml", Work.class))); + when(orcidClientMock.getObject(eq(ORCID), any(), eq(Work.class))) + .then((invocation) -> of(unmarshall("work-" + invocation.getArgument(1) + ".xml", Work.class))); + + when(orcidClientMock.getWorkBulk(any(), eq(ORCID), any())) + .then((invocation) -> unmarshallWorkBulk(invocation.getArgument(2))); + when(orcidClientMock.getWorkBulk(eq(ORCID), any())) + .then((invocation) -> unmarshallWorkBulk(invocation.getArgument(1))); + + } + + @After + public void after() { + dataProvider.setOrcidClient(orcidClient); + orcidConfiguration.setClientId(originalClientId); + } + + @Test + public void testSearchWithoutPagination() throws Exception { + + List externalObjects = dataProvider.searchExternalDataObjects(ORCID, 0, -1); + assertThat(externalObjects, hasSize(3)); + + ExternalDataObject firstObject = externalObjects.get(0); + assertThat(firstObject.getDisplayValue(), is("The elements of style and the survey of ophthalmology.")); + assertThat(firstObject.getValue(), is("The elements of style and the survey of ophthalmology.")); + assertThat(firstObject.getId(), is(ORCID + "::277904")); + assertThat(firstObject.getSource(), is("orcidWorks")); + + List metadata = firstObject.getMetadata(); + assertThat(metadata, hasSize(7)); + assertThat(metadata, 
has(metadata("dc.date.issued", "2011"))); + assertThat(metadata, has(metadata("dc.source", "Test Journal"))); + assertThat(metadata, has(metadata("dc.language.iso", "it"))); + assertThat(metadata, has(metadata("dc.type", "Other"))); + assertThat(metadata, has(metadata("dc.identifier.doi", "10.11234.12"))); + assertThat(metadata, has(metadata("dc.contributor.author", "Walter White"))); + assertThat(metadata, has(metadata("dc.title", "The elements of style and the survey of ophthalmology."))); + + ExternalDataObject secondObject = externalObjects.get(1); + assertThat(secondObject.getDisplayValue(), is("Another cautionary tale.")); + assertThat(secondObject.getValue(), is("Another cautionary tale.")); + assertThat(secondObject.getId(), is(ORCID + "::277902")); + assertThat(secondObject.getSource(), is("orcidWorks")); + + metadata = secondObject.getMetadata(); + assertThat(metadata, hasSize(8)); + assertThat(metadata, has(metadata("dc.date.issued", "2011-05-01"))); + assertThat(metadata, has(metadata("dc.description.abstract", "Short description"))); + assertThat(metadata, has(metadata("dc.relation.ispartof", "Journal title"))); + assertThat(metadata, has(metadata("dc.contributor.author", "Walter White"))); + assertThat(metadata, has(metadata("dc.contributor.author", "John White"))); + assertThat(metadata, has(metadata("dc.contributor.editor", "Jesse Pinkman"))); + assertThat(metadata, has(metadata("dc.title", "Another cautionary tale."))); + assertThat(metadata, has(metadata("dc.type", "Article"))); + + ExternalDataObject thirdObject = externalObjects.get(2); + assertThat(thirdObject.getDisplayValue(), is("Branch artery occlusion in a young woman.")); + assertThat(thirdObject.getValue(), is("Branch artery occlusion in a young woman.")); + assertThat(thirdObject.getId(), is(ORCID + "::277871")); + assertThat(thirdObject.getSource(), is("orcidWorks")); + + metadata = thirdObject.getMetadata(); + assertThat(metadata, hasSize(3)); + assertThat(metadata, 
has(metadata("dc.date.issued", "1985-07-01"))); + assertThat(metadata, has(metadata("dc.title", "Branch artery occlusion in a young woman."))); + assertThat(metadata, has(metadata("dc.type", "Article"))); + + verify(orcidClientMock).getReadPublicAccessToken(); + verify(orcidClientMock).getWorks(ACCESS_TOKEN, ORCID); + verify(orcidClientMock).getWorkBulk(ACCESS_TOKEN, ORCID, List.of("277904", "277902", "277871")); + verifyNoMoreInteractions(orcidClientMock); + + } + + @Test + public void testSearchWithInvalidOrcidId() { + + IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, + () -> dataProvider.searchExternalDataObjects("0000-1111-2222", 0, -1)); + + assertThat(exception.getMessage(), is("The given ORCID ID is not valid: 0000-1111-2222")); + + } + + @Test + public void testSearchWithStoredAccessToken() throws Exception { + + context.turnOffAuthorisationSystem(); + + String accessToken = "95cb5ed9-c208-4bbc-bc99-aa0bd76e4452"; + + Item profile = ItemBuilder.createItem(context, persons) + .withTitle("Profile") + .withOrcidIdentifier(ORCID) + .withDspaceObjectOwner(eperson.getEmail(), eperson.getID().toString()) + .build(); + + OrcidTokenBuilder.create(context, eperson, accessToken) + .withProfileItem(profile) + .build(); + + context.restoreAuthSystemState(); + + List externalObjects = dataProvider.searchExternalDataObjects(ORCID, 0, -1); + assertThat(externalObjects, hasSize(3)); + + verify(orcidClientMock).getWorks(accessToken, ORCID); + verify(orcidClientMock).getWorkBulk(accessToken, ORCID, List.of("277904", "277902", "277871")); + verifyNoMoreInteractions(orcidClientMock); + } + + @Test + public void testSearchWithProfileWithoutAccessToken() throws Exception { + + context.turnOffAuthorisationSystem(); + + ItemBuilder.createItem(context, persons) + .withTitle("Profile") + .withOrcidIdentifier(ORCID) + .build(); + + context.restoreAuthSystemState(); + + List externalObjects = dataProvider.searchExternalDataObjects(ORCID, 0, -1); + 
assertThat(externalObjects, hasSize(3)); + verify(orcidClientMock).getReadPublicAccessToken(); + verify(orcidClientMock).getWorks(ACCESS_TOKEN, ORCID); + verify(orcidClientMock).getWorkBulk(ACCESS_TOKEN, ORCID, List.of("277904", "277902", "277871")); + verifyNoMoreInteractions(orcidClientMock); + } + + @Test + public void testSearchWithoutResults() throws Exception { + + String unknownOrcid = "1111-2222-3333-4444"; + when(orcidClientMock.getWorks(ACCESS_TOKEN, unknownOrcid)).thenReturn(new Works()); + + List externalObjects = dataProvider.searchExternalDataObjects(unknownOrcid, 0, -1); + assertThat(externalObjects, empty()); + + verify(orcidClientMock).getReadPublicAccessToken(); + verify(orcidClientMock).getWorks(ACCESS_TOKEN, unknownOrcid); + verifyNoMoreInteractions(orcidClientMock); + } + + @Test + public void testClientCredentialsTokenCache() throws Exception { + + List externalObjects = dataProvider.searchExternalDataObjects(ORCID, 0, -1); + assertThat(externalObjects, hasSize(3)); + + verify(orcidClientMock).getReadPublicAccessToken(); + + externalObjects = dataProvider.searchExternalDataObjects(ORCID, 0, -1); + assertThat(externalObjects, hasSize(3)); + + verify(orcidClientMock, times(1)).getReadPublicAccessToken(); + + dataProvider.setReadPublicAccessToken(null); + + externalObjects = dataProvider.searchExternalDataObjects(ORCID, 0, -1); + assertThat(externalObjects, hasSize(3)); + + verify(orcidClientMock, times(2)).getReadPublicAccessToken(); + + } + + @Test + public void testSearchPagination() throws Exception { + + List externalObjects = dataProvider.searchExternalDataObjects(ORCID, 0, -1); + assertThat(externalObjects, hasSize(3)); + assertThat(externalObjects, has((externalObject -> externalObject.getId().equals(ORCID + "::277904")))); + assertThat(externalObjects, has((externalObject -> externalObject.getId().equals(ORCID + "::277902")))); + assertThat(externalObjects, has((externalObject -> externalObject.getId().equals(ORCID + "::277871")))); + + 
verify(orcidClientMock).getReadPublicAccessToken(); + verify(orcidClientMock).getWorks(ACCESS_TOKEN, ORCID); + verify(orcidClientMock).getWorkBulk(ACCESS_TOKEN, ORCID, List.of("277904", "277902", "277871")); + + externalObjects = dataProvider.searchExternalDataObjects(ORCID, 0, 5); + assertThat(externalObjects, hasSize(3)); + assertThat(externalObjects, has((externalObject -> externalObject.getId().equals(ORCID + "::277904")))); + assertThat(externalObjects, has((externalObject -> externalObject.getId().equals(ORCID + "::277902")))); + assertThat(externalObjects, has((externalObject -> externalObject.getId().equals(ORCID + "::277871")))); + + verify(orcidClientMock, times(2)).getWorks(ACCESS_TOKEN, ORCID); + verify(orcidClientMock, times(2)).getWorkBulk(ACCESS_TOKEN, ORCID, List.of("277904", "277902", "277871")); + + externalObjects = dataProvider.searchExternalDataObjects(ORCID, 0, 2); + assertThat(externalObjects, hasSize(2)); + assertThat(externalObjects, has((externalObject -> externalObject.getId().equals(ORCID + "::277904")))); + assertThat(externalObjects, has((externalObject -> externalObject.getId().equals(ORCID + "::277902")))); + + verify(orcidClientMock, times(3)).getWorks(ACCESS_TOKEN, ORCID); + verify(orcidClientMock).getWorkBulk(ACCESS_TOKEN, ORCID, List.of("277904", "277902")); + + externalObjects = dataProvider.searchExternalDataObjects(ORCID, 1, 1); + assertThat(externalObjects, hasSize(1)); + assertThat(externalObjects, has((externalObject -> externalObject.getId().equals(ORCID + "::277902")))); + + verify(orcidClientMock, times(4)).getWorks(ACCESS_TOKEN, ORCID); + verify(orcidClientMock).getObject(ACCESS_TOKEN, ORCID, "277902", Work.class); + + externalObjects = dataProvider.searchExternalDataObjects(ORCID, 2, 1); + assertThat(externalObjects, hasSize(1)); + assertThat(externalObjects, has((externalObject -> externalObject.getId().equals(ORCID + "::277871")))); + + verify(orcidClientMock, times(5)).getWorks(ACCESS_TOKEN, ORCID); + 
verify(orcidClientMock).getObject(ACCESS_TOKEN, ORCID, "277871", Work.class); + + verifyNoMoreInteractions(orcidClientMock); + + } + + @Test + public void testGetExternalDataObject() { + Optional optional = dataProvider.getExternalDataObject(ORCID + "::277902"); + assertThat(optional.isPresent(), is(true)); + + ExternalDataObject externalDataObject = optional.get(); + assertThat(externalDataObject.getDisplayValue(), is("Another cautionary tale.")); + assertThat(externalDataObject.getValue(), is("Another cautionary tale.")); + assertThat(externalDataObject.getId(), is(ORCID + "::277902")); + assertThat(externalDataObject.getSource(), is("orcidWorks")); + + List metadata = externalDataObject.getMetadata(); + assertThat(metadata, hasSize(8)); + assertThat(metadata, has(metadata("dc.date.issued", "2011-05-01"))); + assertThat(metadata, has(metadata("dc.description.abstract", "Short description"))); + assertThat(metadata, has(metadata("dc.relation.ispartof", "Journal title"))); + assertThat(metadata, has(metadata("dc.contributor.author", "Walter White"))); + assertThat(metadata, has(metadata("dc.contributor.author", "John White"))); + assertThat(metadata, has(metadata("dc.contributor.editor", "Jesse Pinkman"))); + assertThat(metadata, has(metadata("dc.title", "Another cautionary tale."))); + assertThat(metadata, has(metadata("dc.type", "Article"))); + + verify(orcidClientMock).getReadPublicAccessToken(); + verify(orcidClientMock).getObject(ACCESS_TOKEN, ORCID, "277902", Work.class); + verifyNoMoreInteractions(orcidClientMock); + } + + @Test + public void testGetExternalDataObjectWithInvalidOrcidId() { + + IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, + () -> dataProvider.getExternalDataObject("invalid::277902")); + + assertThat(exception.getMessage(), is("The given ORCID ID is not valid: invalid" )); + } + + @Test + public void testGetExternalDataObjectWithInvalidId() { + + IllegalArgumentException exception = 
assertThrows(IllegalArgumentException.class, + () -> dataProvider.getExternalDataObject("id")); + + assertThat(exception.getMessage(), is("Invalid identifier 'id', expected ::")); + } + + @Test + public void testSearchWithoutApiKeysConfigured() throws Exception { + + context.turnOffAuthorisationSystem(); + + orcidConfiguration.setClientSecret(null); + + ItemBuilder.createItem(context, persons) + .withTitle("Profile") + .withOrcidIdentifier(ORCID) + .build(); + + context.restoreAuthSystemState(); + + List externalObjects = dataProvider.searchExternalDataObjects(ORCID, 0, -1); + assertThat(externalObjects, hasSize(3)); + + verify(orcidClientMock).getWorks(ORCID); + verify(orcidClientMock).getWorkBulk(ORCID, List.of("277904", "277902", "277871")); + verifyNoMoreInteractions(orcidClientMock); + } + + private Predicate metadata(String metadataField, String value) { + MetadataFieldName metadataFieldName = new MetadataFieldName(metadataField); + return metadata(metadataFieldName.schema, metadataFieldName.element, metadataFieldName.qualifier, value); + } + + private Predicate metadata(String schema, String element, String qualifier, String value) { + return dto -> StringUtils.equals(schema, dto.getSchema()) + && StringUtils.equals(element, dto.getElement()) + && StringUtils.equals(qualifier, dto.getQualifier()) + && StringUtils.equals(value, dto.getValue()); + } + + private OrcidTokenResponseDTO buildTokenResponse(String accessToken) { + OrcidTokenResponseDTO response = new OrcidTokenResponseDTO(); + response.setAccessToken(accessToken); + return response; + } + + private WorkBulk unmarshallWorkBulk(List putCodes) throws Exception { + return unmarshall("workBulk-" + String.join("-", putCodes) + ".xml", WorkBulk.class); + } + + @SuppressWarnings("unchecked") + private T unmarshall(String fileName, Class clazz) throws Exception { + JAXBContext jaxbContext = JAXBContext.newInstance(clazz); + Unmarshaller unmarshaller = jaxbContext.createUnmarshaller(); + URL resource = 
getClass().getClassLoader().getResource(BASE_XML_DIR_PATH + fileName); + if (resource == null) { + throw new IllegalStateException("No resource found named " + BASE_XML_DIR_PATH + fileName); + } + return (T) unmarshaller.unmarshal(new File(resource.getFile())); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/google/client/GoogleAnalytics4ClientRequestBuilderTest.java b/dspace-api/src/test/java/org/dspace/google/client/GoogleAnalytics4ClientRequestBuilderTest.java new file mode 100644 index 0000000000..1a5d0b4eb3 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/google/client/GoogleAnalytics4ClientRequestBuilderTest.java @@ -0,0 +1,220 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.google.client; + +import static java.util.List.of; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.junit.Assert.assertThrows; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.List; + +import org.dspace.google.GoogleAnalyticsEvent; +import org.dspace.services.ConfigurationService; +import org.json.JSONArray; +import org.json.JSONObject; +import org.junit.Before; +import org.junit.Test; + +/** + * Unit tests for {@link GoogleAnalytics4ClientRequestBuilder}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class GoogleAnalytics4ClientRequestBuilderTest { + + private GoogleAnalytics4ClientRequestBuilder requestBuilder; + + private ConfigurationService configurationService = mock(ConfigurationService.class); + + @Before + public void setup() { + requestBuilder = new GoogleAnalytics4ClientRequestBuilder("https://google-analytics/test"); + requestBuilder.setConfigurationService(configurationService); + } + + @Test + public void testGetEndpointUrl() { + + when(configurationService.getProperty("google.analytics.api-secret")).thenReturn("abc123"); + + String endpointUrl = requestBuilder.getEndpointUrl("G-12345"); + assertThat(endpointUrl, is("https://google-analytics/test?api_secret=abc123&measurement_id=G-12345")); + + } + + @Test + public void testGetEndpointUrlWithNotSupportedKey() { + + assertThrows("Only keys with G- prefix are supported", + IllegalArgumentException.class, () -> requestBuilder.getEndpointUrl("UA-12345")); + + } + + @Test + public void testGetEndpointUrlWithoutApiSecretConfigured() { + + assertThrows("The API secret must be configured to sent GA4 events", + GoogleAnalyticsClientException.class, () -> requestBuilder.getEndpointUrl("G-12345")); + + } + + @Test + public void testComposeRequestBodiesWithoutEvents() { + + List requestsBody = requestBuilder.composeRequestsBody("G-12345", List.of()); + assertThat(requestsBody, empty()); + + } + + @Test + public void testComposeRequestBodiesWithSingleEvent() { + + GoogleAnalyticsEvent event = buildEvent("123", "192.168.1.25", "Chrome", "REF", + "/api/documents/123", "Test publication"); + + List requestsBody = requestBuilder.composeRequestsBody("G-12345", List.of(event)); + assertThat(requestsBody, hasSize(1)); + + JSONObject requestBody = new JSONObject(requestsBody.get(0)); + assertThat(requestBody.get("client_id"), is("123")); + + JSONArray eventsArray = requestBody.getJSONArray("events"); + assertThat(eventsArray.length(), 
is(1)); + + assertEventJsonHasAttributes(eventsArray.getJSONObject(0), "item", "download", "bitstream", "192.168.1.25", + "Chrome", "REF", "/api/documents/123", "Test publication"); + + } + + @Test + public void testComposeRequestBodiesWithManyEventsWithSameClientId() { + + GoogleAnalyticsEvent event1 = buildEvent("123", "192.168.1.25", "Chrome", "REF", + "/api/documents/123", "Test publication"); + + GoogleAnalyticsEvent event2 = buildEvent("123", "192.168.1.25", "Mozilla Firefox", "REF-2", + "/api/documents/12345", "Test publication 2"); + + List requestsBody = requestBuilder.composeRequestsBody("G-12345", List.of(event1, event2)); + assertThat(requestsBody, hasSize(1)); + + JSONObject requestBody = new JSONObject(requestsBody.get(0)); + assertThat(requestBody.get("client_id"), is("123")); + + JSONArray eventsArray = requestBody.getJSONArray("events"); + assertThat(eventsArray.length(), is(2)); + + JSONObject eventJson1 = findEventJsonByDocumentTitle(eventsArray, "Test publication"); + JSONObject eventJson2 = findEventJsonByDocumentTitle(eventsArray, "Test publication 2"); + + assertThat(eventJson1, notNullValue()); + assertThat(eventJson2, notNullValue()); + + assertEventJsonHasAttributes(eventJson1, "item", "download", "bitstream", "192.168.1.25", + "Chrome", "REF", "/api/documents/123", "Test publication"); + + assertEventJsonHasAttributes(eventJson2, "item", "download", "bitstream", "192.168.1.25", + "Mozilla Firefox", "REF-2", "/api/documents/12345", "Test publication 2"); + + } + + @Test + public void testComposeRequestBodiesWithManyEventsWithDifferentClientId() { + + GoogleAnalyticsEvent event1 = buildEvent("123", "192.168.1.25", "Chrome", "REF", + "/api/documents/123", "Test publication"); + + GoogleAnalyticsEvent event2 = buildEvent("123", "192.168.1.25", "Mozilla Firefox", "REF-2", + "/api/documents/12345", "Test publication 2"); + + GoogleAnalyticsEvent event3 = buildEvent("987", "192.168.1.13", "Postman", null, + "/api/documents/654", "Test 
publication 3"); + + List requestsBody = requestBuilder.composeRequestsBody("G-12345", of(event1, event2, event3)); + assertThat(requestsBody, hasSize(2)); + + JSONObject firstRequestBody = findRequestBodyByClientId(requestsBody, "123"); + assertThat(firstRequestBody.get("client_id"), is("123")); + + JSONArray firstEventsArray = firstRequestBody.getJSONArray("events"); + assertThat(firstEventsArray.length(), is(2)); + + JSONObject eventJson1 = findEventJsonByDocumentTitle(firstEventsArray, "Test publication"); + JSONObject eventJson2 = findEventJsonByDocumentTitle(firstEventsArray, "Test publication 2"); + + assertThat(eventJson1, notNullValue()); + assertThat(eventJson2, notNullValue()); + + assertEventJsonHasAttributes(eventJson1, "item", "download", "bitstream", "192.168.1.25", + "Chrome", "REF", "/api/documents/123", "Test publication"); + + assertEventJsonHasAttributes(eventJson2, "item", "download", "bitstream", "192.168.1.25", + "Mozilla Firefox", "REF-2", "/api/documents/12345", "Test publication 2"); + + JSONObject secondRequestBody = findRequestBodyByClientId(requestsBody, "987"); + assertThat(secondRequestBody.get("client_id"), is("987")); + + JSONArray secondEventsArray = secondRequestBody.getJSONArray("events"); + assertThat(secondEventsArray.length(), is(1)); + + assertEventJsonHasAttributes(secondEventsArray.getJSONObject(0), "item", "download", "bitstream", + "192.168.1.13", "Postman", "", "/api/documents/654", "Test publication 3"); + + } + + private void assertEventJsonHasAttributes(JSONObject event, String name, String action, String category, + String userIp, String userAgent, String documentReferrer, String documentPath, String documentTitle) { + + assertThat(event.get("name"), is(name)); + assertThat(event.getJSONObject("params"), notNullValue()); + assertThat(event.getJSONObject("params").get("action"), is(action)); + assertThat(event.getJSONObject("params").get("category"), is(category)); + 
assertThat(event.getJSONObject("params").get("document_title"), is(documentTitle)); + assertThat(event.getJSONObject("params").get("user_ip"), is(userIp)); + assertThat(event.getJSONObject("params").get("user_agent"), is(userAgent)); + assertThat(event.getJSONObject("params").get("document_referrer"), is(documentReferrer)); + assertThat(event.getJSONObject("params").get("document_path"), is(documentPath)); + assertThat(event.getJSONObject("params").get("time"), notNullValue()); + + } + + private JSONObject findRequestBodyByClientId(List requestsBody, String clientId) { + for (String requestBody : requestsBody) { + JSONObject requestBodyJson = new JSONObject(requestBody); + if (requestBodyJson.get("client_id").equals(clientId)) { + return requestBodyJson; + } + } + return null; + } + + private JSONObject findEventJsonByDocumentTitle(JSONArray events, String documentTitle) { + + for (int i = 0; i < events.length(); i++) { + JSONObject event = events.getJSONObject(i); + assertThat(event.getJSONObject("params"), notNullValue()); + if (event.getJSONObject("params").get("document_title").equals(documentTitle)) { + return event; + } + } + + return null; + } + + private GoogleAnalyticsEvent buildEvent(String clientId, String userIp, String userAgent, + String documentReferrer, String documentPath, String documentTitle) { + return new GoogleAnalyticsEvent(clientId, userIp, userAgent, documentReferrer, documentPath, documentTitle); + } +} diff --git a/dspace-api/src/test/java/org/dspace/google/client/UniversalAnalyticsClientRequestBuilderTest.java b/dspace-api/src/test/java/org/dspace/google/client/UniversalAnalyticsClientRequestBuilderTest.java new file mode 100644 index 0000000000..bc30c2a124 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/google/client/UniversalAnalyticsClientRequestBuilderTest.java @@ -0,0 +1,152 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + 
* tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.google.client; + +import static java.util.List.of; +import static org.apache.commons.lang.StringUtils.countMatches; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThrows; + +import java.util.List; + +import org.dspace.google.GoogleAnalyticsEvent; +import org.junit.Before; +import org.junit.Test; + +/** + * Unit tests for {@link UniversalAnalyticsClientRequestBuilder}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class UniversalAnalyticsClientRequestBuilderTest { + + private UniversalAnalyticsClientRequestBuilder requestBuilder; + + @Before + public void setup() { + requestBuilder = new UniversalAnalyticsClientRequestBuilder("https://google-analytics/test"); + } + + @Test + public void testGetEndpointUrl() { + + String endpointUrl = requestBuilder.getEndpointUrl("UA-12345"); + assertThat(endpointUrl, is("https://google-analytics/test")); + + } + + @Test + public void testComposeRequestBodiesWithoutEvents() { + + List requestsBody = requestBuilder.composeRequestsBody("UA-12345", List.of()); + assertThat(requestsBody, empty()); + + } + + @Test + public void testComposeRequestBodiesWithNotSupportedKey() { + + GoogleAnalyticsEvent event = buildEvent("123", "192.168.1.25", "Chrome", "REF", + "/api/documents/123", "Test publication"); + + assertThrows("Only keys with G- prefix are supported", + IllegalArgumentException.class, () -> requestBuilder.composeRequestsBody("G-12345", List.of(event))); + + } + + @Test + public void testComposeRequestBodiesWithSingleEvent() { + + GoogleAnalyticsEvent event = buildEvent("123", "192.168.1.25", "Chrome", "REF", + "/api/documents/123", "Test publication"); + + List requestsBody = requestBuilder.composeRequestsBody("UA-12345", 
List.of(event)); + assertThat(requestsBody, hasSize(1)); + + String requestBody = requestsBody.get(0); + assertThat(countMatches(requestBody, "&qt="), is(1)); + + String requestBodyWithoutTime = removeAllTimeSections(requestBody); + + String expectedRequestBodyWithoutTime = "v=1&tid=UA-12345&cid=123&t=event&uip=192.168.1.25&ua=Chrome&dr=REF" + + "&dp=%2Fapi%2Fdocuments%2F123&dt=Test+publication&ec=bitstream&ea=download&el=item"; + + assertThat(requestBodyWithoutTime, is(expectedRequestBodyWithoutTime)); + + } + + @Test + public void testComposeRequestBodiesWithManyEventsWithSameClientId() { + + GoogleAnalyticsEvent event1 = buildEvent("123", "192.168.1.25", "Chrome", "REF", + "/api/documents/123", "Test publication"); + + GoogleAnalyticsEvent event2 = buildEvent("123", "192.168.1.25", "Mozilla Firefox", "REF-2", + "/api/documents/12345", "Test publication 2"); + + List requestsBody = requestBuilder.composeRequestsBody("UA-12345", List.of(event1, event2)); + assertThat(requestsBody, hasSize(1)); + String requestBody = requestsBody.get(0); + + assertThat(countMatches(requestBody, "&qt="), is(2)); + + String requestBodyWithoutTime = removeAllTimeSections(requestBody); + + String expectedRequestBodyWithoutTime = "v=1&tid=UA-12345&cid=123&t=event&uip=192.168.1.25&ua=Chrome&dr=REF" + + "&dp=%2Fapi%2Fdocuments%2F123&dt=Test+publication&ec=bitstream&ea=download&el=item\n" + + "v=1&tid=UA-12345&cid=123&t=event&uip=192.168.1.25&ua=Mozilla+Firefox&dr=REF-2" + + "&dp=%2Fapi%2Fdocuments%2F12345&dt=Test+publication+2&ec=bitstream&ea=download&el=item"; + + assertThat(requestBodyWithoutTime, is(expectedRequestBodyWithoutTime)); + + } + + @Test + public void testComposeRequestBodiesWithManyEventsWithDifferentClientId() { + + GoogleAnalyticsEvent event1 = buildEvent("123", "192.168.1.25", "Chrome", "REF", + "/api/documents/123", "Test publication"); + + GoogleAnalyticsEvent event2 = buildEvent("123", "192.168.1.25", "Mozilla Firefox", "REF-2", + "/api/documents/12345", "Test 
publication 2"); + + GoogleAnalyticsEvent event3 = buildEvent("987", "192.168.1.13", "Postman", null, + "/api/documents/654", "Test publication 3"); + + List requestsBody = requestBuilder.composeRequestsBody("UA-12345", of(event1, event2, event3)); + assertThat(requestsBody, hasSize(1)); + String requestBody = requestsBody.get(0); + + assertThat(countMatches(requestBody, "&qt="), is(3)); + + String requestBodyWithoutTime = removeAllTimeSections(requestBody); + + String expectedRequestBodyWithoutTime = "v=1&tid=UA-12345&cid=123&t=event&uip=192.168.1.25&ua=Chrome&dr=REF" + + "&dp=%2Fapi%2Fdocuments%2F123&dt=Test+publication&ec=bitstream&ea=download&el=item\n" + + "v=1&tid=UA-12345&cid=123&t=event&uip=192.168.1.25&ua=Mozilla+Firefox&dr=REF-2" + + "&dp=%2Fapi%2Fdocuments%2F12345&dt=Test+publication+2&ec=bitstream&ea=download&el=item\n" + + "v=1&tid=UA-12345&cid=987&t=event&uip=192.168.1.13&ua=Postman&dr=" + + "&dp=%2Fapi%2Fdocuments%2F654&dt=Test+publication+3&ec=bitstream&ea=download&el=item"; + + assertThat(requestBodyWithoutTime, is(expectedRequestBodyWithoutTime)); + + } + + private String removeAllTimeSections(String requestBody) { + return requestBody.replaceAll("&qt=\\d+", ""); + } + + private GoogleAnalyticsEvent buildEvent(String clientId, String userIp, String userAgent, + String documentReferrer, String documentPath, String documentTitle) { + return new GoogleAnalyticsEvent(clientId, userIp, userAgent, documentReferrer, documentPath, documentTitle); + } +} diff --git a/dspace-api/src/test/java/org/dspace/identifier/DOIIdentifierProviderTest.java b/dspace-api/src/test/java/org/dspace/identifier/DOIIdentifierProviderTest.java index 52cda18df5..b9dbbba647 100644 --- a/dspace-api/src/test/java/org/dspace/identifier/DOIIdentifierProviderTest.java +++ b/dspace-api/src/test/java/org/dspace/identifier/DOIIdentifierProviderTest.java @@ -191,7 +191,7 @@ public class DOIIdentifierProviderTest List remainder = new ArrayList<>(); for (MetadataValue id : metadata) { - if 
(!id.getValue().startsWith(DOI.RESOLVER)) { + if (!id.getValue().startsWith(doiService.getResolver())) { remainder.add(id.getValue()); } } @@ -278,11 +278,11 @@ public class DOIIdentifierProviderTest PREFIX + "/" + NAMESPACE_SEPARATOR + "lkjljasd1234", DOI.SCHEME + "10.5072/123abc-lkj/kljl", "http://dx.doi.org/10.5072/123abc-lkj/kljl", - DOI.RESOLVER + "/10.5072/123abc-lkj/kljl" + doiService.getResolver() + "/10.5072/123abc-lkj/kljl" }; for (String doi : validDOIs) { - assertTrue("DOI should be supported", provider.supports(doi)); + assertTrue("DOI " + doi + " should be supported", provider.supports(doi)); } } diff --git a/dspace-api/src/test/java/org/dspace/iiif/MockIIIFApiQueryServiceImpl.java b/dspace-api/src/test/java/org/dspace/iiif/MockIIIFApiQueryServiceImpl.java new file mode 100644 index 0000000000..a240e76f97 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/iiif/MockIIIFApiQueryServiceImpl.java @@ -0,0 +1,20 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.iiif; + +import org.dspace.content.Bitstream; + +/** + * Mock for the IIIFApiQueryService. 
+ * @author Michael Spalti (mspalti at willamette.edu) + */ +public class MockIIIFApiQueryServiceImpl extends IIIFApiQueryServiceImpl { + public int[] getImageDimensions(Bitstream bitstream) { + return new int[]{64, 64}; + } +} diff --git a/dspace-api/src/test/java/org/dspace/iiif/canvasdimension/CanvasDimensionsIT.java b/dspace-api/src/test/java/org/dspace/iiif/canvasdimension/CanvasDimensionsIT.java index 502266da06..038654af43 100644 --- a/dspace-api/src/test/java/org/dspace/iiif/canvasdimension/CanvasDimensionsIT.java +++ b/dspace-api/src/test/java/org/dspace/iiif/canvasdimension/CanvasDimensionsIT.java @@ -7,14 +7,15 @@ */ package org.dspace.iiif.canvasdimension; -import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.PrintStream; +import java.util.regex.Pattern; +import org.apache.commons.lang.StringUtils; import org.dspace.AbstractIntegrationTestWithDatabase; import org.dspace.builder.BitstreamBuilder; import org.dspace.builder.CollectionBuilder; @@ -231,9 +232,7 @@ public class CanvasDimensionsIT extends AbstractIntegrationTestWithDatabase { .withName("Bitstream2.jpg") .withMimeType("image/jpeg") .build(); - context.restoreAuthSystemState(); - String id = parentCommunity.getID().toString(); execCanvasScript(id); @@ -408,7 +407,8 @@ public class CanvasDimensionsIT extends AbstractIntegrationTestWithDatabase { execCanvasScriptWithMaxRecs(id); // check System.out for number of items processed. 
- assertEquals("2 IIIF items were processed.\n", outContent.toString()); + Pattern regex = Pattern.compile(".*2 IIIF items were processed", Pattern.DOTALL); + assertTrue(regex.matcher(StringUtils.chomp(outContent.toString())).find()); } @Test diff --git a/dspace-api/src/test/java/org/dspace/license/MockCCLicenseConnectorServiceImpl.java b/dspace-api/src/test/java/org/dspace/license/MockCCLicenseConnectorServiceImpl.java index 8545c4187d..30a5a3a9b5 100644 --- a/dspace-api/src/test/java/org/dspace/license/MockCCLicenseConnectorServiceImpl.java +++ b/dspace-api/src/test/java/org/dspace/license/MockCCLicenseConnectorServiceImpl.java @@ -15,8 +15,8 @@ import java.util.List; import java.util.Map; import org.apache.commons.lang3.StringUtils; -import org.jdom.Document; -import org.jdom.JDOMException; +import org.jdom2.Document; +import org.jdom2.JDOMException; /** * Mock implementation for the Creative commons license connector service. diff --git a/dspace-api/src/test/java/org/dspace/orcid/OrcidQueueConsumerIT.java b/dspace-api/src/test/java/org/dspace/orcid/OrcidQueueConsumerIT.java new file mode 100644 index 0000000000..f2e528d78c --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/orcid/OrcidQueueConsumerIT.java @@ -0,0 +1,791 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid; + +import static org.dspace.app.matcher.OrcidQueueMatcher.matches; +import static org.dspace.builder.OrcidHistoryBuilder.createOrcidHistory; +import static org.dspace.builder.RelationshipTypeBuilder.createRelationshipTypeBuilder; +import static org.dspace.orcid.OrcidOperation.DELETE; +import static org.dspace.orcid.OrcidOperation.INSERT; +import static org.dspace.orcid.OrcidOperation.UPDATE; +import static org.dspace.orcid.model.OrcidProfileSectionType.KEYWORDS; +import static 
org.dspace.profile.OrcidEntitySyncPreference.ALL; +import static org.dspace.profile.OrcidEntitySyncPreference.DISABLED; +import static org.dspace.profile.OrcidProfileSyncPreference.BIOGRAPHICAL; +import static org.dspace.profile.OrcidProfileSyncPreference.IDENTIFIERS; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; + +import java.sql.SQLException; +import java.time.Instant; +import java.util.Date; +import java.util.List; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EntityTypeBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.OrcidHistoryBuilder; +import org.dspace.builder.RelationshipBuilder; +import org.dspace.content.Collection; +import org.dspace.content.EntityType; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.RelationshipType; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.ItemService; +import org.dspace.orcid.consumer.OrcidQueueConsumer; +import org.dspace.orcid.factory.OrcidServiceFactory; +import org.dspace.orcid.service.OrcidQueueService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +/** + * Integration tests for {@link OrcidQueueConsumer}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidQueueConsumerIT extends AbstractIntegrationTestWithDatabase { + + private OrcidQueueService orcidQueueService = OrcidServiceFactory.getInstance().getOrcidQueueService(); + + private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + + private Collection profileCollection; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent community") + .build(); + + profileCollection = createCollection("Profiles", "Person"); + + context.restoreAuthSystemState(); + } + + @After + @Override + public void destroy() throws Exception { + List records = orcidQueueService.findAll(context); + for (OrcidQueue record : records) { + orcidQueueService.delete(context, record); + } + context.setDispatcher(null); + + super.destroy(); + } + + @Test + public void testWithNotOrcidSynchronizationEntity() throws Exception { + + context.turnOffAuthorisationSystem(); + + Collection orgUnits = CollectionBuilder.createCollection(context, parentCommunity) + .withName("OrgUnits") + .withEntityType("OrgUnit") + .build(); + + ItemBuilder.createItem(context, orgUnits) + .withTitle("Test OrgUnit") + .withSubject("test") + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + List queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, empty()); + } + + @Test + public void testWithOrcidSynchronizationDisabled() throws Exception { + + configurationService.setProperty("orcid.synchronization-enabled", false); + + context.turnOffAuthorisationSystem(); + + ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + 
.withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withSubject("test") + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .withOrcidSynchronizationProfilePreference(IDENTIFIERS) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + List queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, empty()); + } + + @Test + public void testOrcidQueueRecordCreationForProfile() throws Exception { + // Set a fake handle prefix for this test which we will use to assign handles below + configurationService.setProperty("handle.prefix", "fake-handle"); + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withSubject("test") + .withHandle("fake-handle/190") + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .withOrcidSynchronizationProfilePreference(IDENTIFIERS) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + List queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(2)); + assertThat(queueRecords, hasItem(matches(profile, profile, "KEYWORDS", null, + "dc.subject::test", "test", INSERT))); + assertThat(queueRecords, hasItem(matches(profile, "RESEARCHER_URLS", null, + "dc.identifier.uri::http://localhost:4000/handle/fake-handle/190", + "http://localhost:4000/handle/fake-handle/190", INSERT))); + + addMetadata(profile, "person", "name", "variant", "User Test", null); + context.commit(); + + queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(3)); + assertThat(queueRecords, hasItem( + matches(profile, profile, "KEYWORDS", null, "dc.subject::test", "test", INSERT))); + assertThat(queueRecords, hasItem(matches(profile, "RESEARCHER_URLS", null, + 
"dc.identifier.uri::http://localhost:4000/handle/fake-handle/190", + "http://localhost:4000/handle/fake-handle/190", INSERT))); + assertThat(queueRecords, hasItem(matches(profile, profile, "OTHER_NAMES", + null, "person.name.variant::User Test", "User Test", INSERT))); + } + + @Test + public void testOrcidQueueRecordCreationForProfileWithSameMetadataPreviouslyDeleted() throws Exception { + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .build(); + + OrcidHistoryBuilder.createOrcidHistory(context, profile, profile) + .withRecordType("COUNTRY") + .withMetadata("person.country::IT") + .withPutCode("123456") + .withOperation(OrcidOperation.INSERT) + .withTimestamp(Date.from(Instant.ofEpochMilli(100000))) + .withStatus(201) + .build(); + + OrcidHistoryBuilder.createOrcidHistory(context, profile, profile) + .withRecordType("COUNTRY") + .withMetadata("person.country::IT") + .withPutCode("123456") + .withOperation(OrcidOperation.DELETE) + .withTimestamp(Date.from(Instant.ofEpochMilli(200000))) + .withStatus(204) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + + addMetadata(profile, "person", "country", null, "IT", null); + context.commit(); + + List queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords.get(0), matches(profile, "COUNTRY", null, "person.country::IT", "IT", INSERT)); + } + + @Test + public void testOrcidQueueRecordCreationForProfileWithMetadataPreviouslyDeletedAndThenInsertedAgain() + throws Exception { + + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + 
.withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .build(); + + OrcidHistoryBuilder.createOrcidHistory(context, profile, profile) + .withRecordType("COUNTRY") + .withMetadata("person.country::IT") + .withPutCode("123456") + .withOperation(OrcidOperation.INSERT) + .withTimestamp(Date.from(Instant.ofEpochMilli(100000))) + .withStatus(201) + .build(); + + OrcidHistoryBuilder.createOrcidHistory(context, profile, profile) + .withRecordType("COUNTRY") + .withMetadata("person.country::IT") + .withPutCode("123456") + .withOperation(OrcidOperation.DELETE) + .withTimestamp(Date.from(Instant.ofEpochMilli(200000))) + .withStatus(204) + .build(); + + OrcidHistoryBuilder.createOrcidHistory(context, profile, profile) + .withRecordType("COUNTRY") + .withMetadata("person.country::IT") + .withPutCode("123456") + .withOperation(OrcidOperation.INSERT) + .withTimestamp(Date.from(Instant.ofEpochMilli(300000))) + .withStatus(201) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + + addMetadata(profile, "person", "country", null, "IT", null); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + + } + + @Test + public void testOrcidQueueRecordCreationForProfileWithNotSuccessfullyMetadataDeletion() + throws Exception { + + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .build(); + + OrcidHistoryBuilder.createOrcidHistory(context, profile, profile) + .withRecordType("COUNTRY") + .withMetadata("person.country::IT") + .withPutCode("123456") + .withOperation(OrcidOperation.INSERT) + 
.withTimestamp(Date.from(Instant.ofEpochMilli(100000))) + .withStatus(201) + .build(); + + OrcidHistoryBuilder.createOrcidHistory(context, profile, profile) + .withRecordType("COUNTRY") + .withMetadata("person.country::IT") + .withPutCode("123456") + .withOperation(OrcidOperation.DELETE) + .withTimestamp(Date.from(Instant.ofEpochMilli(200000))) + .withStatus(400) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + + addMetadata(profile, "person", "country", null, "IT", null); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + + } + + @Test + public void testOrcidQueueRecordCreationAndDeletion() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item item = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withSubject("Science") + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + List records = orcidQueueService.findAll(context); + assertThat(records, hasSize(1)); + assertThat(records, hasItem(matches(item, KEYWORDS.name(), null, "dc.subject::Science", "Science", INSERT))); + + removeMetadata(item, "dc", "subject", null); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + + } + + @Test + public void testOrcidQueueRecordCreationAndDeletionWithOrcidHistoryInsertionInTheMiddle() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item item = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withSubject("Science") + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .build(); + + context.restoreAuthSystemState(); + 
context.commit(); + + List records = orcidQueueService.findAll(context); + assertThat(records, hasSize(1)); + assertThat(records, hasItem(matches(item, KEYWORDS.name(), null, "dc.subject::Science", "Science", INSERT))); + + OrcidHistoryBuilder.createOrcidHistory(context, item, item) + .withPutCode("12345") + .withMetadata("dc.subject::Science") + .withDescription("Science") + .withRecordType(KEYWORDS.name()) + .withOperation(INSERT) + .withStatus(201) + .build(); + + removeMetadata(item, "dc", "subject", null); + context.commit(); + + records = orcidQueueService.findAll(context); + assertThat(records, hasSize(1)); + assertThat(records, hasItem(matches(item, KEYWORDS.name(), "12345", "dc.subject::Science", "Science", DELETE))); + + } + + @Test + public void testOrcidQueueRecordCreationAndDeletionWithFailedOrcidHistoryInsertionInTheMiddle() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item item = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withSubject("Science") + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + List records = orcidQueueService.findAll(context); + assertThat(records, hasSize(1)); + assertThat(records, hasItem(matches(item, KEYWORDS.name(), null, "dc.subject::Science", "Science", INSERT))); + + OrcidHistoryBuilder.createOrcidHistory(context, item, item) + .withPutCode("12345") + .withMetadata("dc.subject::Science") + .withDescription("Science") + .withRecordType(KEYWORDS.name()) + .withOperation(INSERT) + .withStatus(400) + .build(); + + removeMetadata(item, "dc", "subject", null); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + + } + + @Test + public void testNoOrcidQueueRecordCreationOccursIfProfileSynchronizationIsDisabled() throws SQLException { + 
context.turnOffAuthorisationSystem(); + + ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + } + + @Test + public void testNoOrcidQueueRecordCreationOccursIfNoComplianceMetadataArePresent() throws SQLException { + context.turnOffAuthorisationSystem(); + + ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + } + + @Test + public void testOrcidQueueRecordCreationForPublication() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withOrcidSynchronizationPublicationsPreference(ALL) + .build(); + + Collection publicationCollection = createCollection("Publications", "Publication"); + + Item publication = ItemBuilder.createItem(context, publicationCollection) + .withTitle("Test publication") + .withAuthor("Test User") + .build(); + + EntityType publicationType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); + EntityType personType = EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + + RelationshipType isAuthorOfPublication = createRelationshipTypeBuilder(context, personType, publicationType, + "isAuthorOfPublication", "isPublicationOfAuthor", 0, null, 0, null).build(); + + 
RelationshipBuilder.createRelationshipBuilder(context, profile, publication, isAuthorOfPublication).build(); + + context.restoreAuthSystemState(); + context.commit(); + + List orcidQueueRecords = orcidQueueService.findAll(context); + assertThat(orcidQueueRecords, hasSize(1)); + assertThat(orcidQueueRecords.get(0), matches(profile, publication, "Publication", INSERT)); + + addMetadata(publication, "dc", "contributor", "editor", "Editor", null); + context.commit(); + + List newOrcidQueueRecords = orcidQueueService.findAll(context); + assertThat(newOrcidQueueRecords, hasSize(1)); + + assertThat(orcidQueueRecords.get(0), equalTo(newOrcidQueueRecords.get(0))); + } + + @Test + public void testOrcidQueueRecordCreationToUpdatePublication() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withOrcidSynchronizationPublicationsPreference(ALL) + .build(); + + Collection publicationCollection = createCollection("Publications", "Publication"); + + Item publication = ItemBuilder.createItem(context, publicationCollection) + .withTitle("Test publication") + .withAuthor("Test User") + .build(); + + createOrcidHistory(context, profile, publication) + .withPutCode("123456") + .withOperation(INSERT) + .build(); + + EntityType publicationType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); + EntityType personType = EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + + RelationshipType isAuthorOfPublication = createRelationshipTypeBuilder(context, personType, publicationType, + "isAuthorOfPublication", "isPublicationOfAuthor", 0, null, 0, null).build(); + + RelationshipBuilder.createRelationshipBuilder(context, profile, publication, isAuthorOfPublication).build(); + + context.restoreAuthSystemState(); + 
context.commit(); + + List orcidQueueRecords = orcidQueueService.findAll(context); + assertThat(orcidQueueRecords, hasSize(1)); + assertThat(orcidQueueRecords.get(0), matches(profile, publication, "Publication", "123456", UPDATE)); + } + + @Test + public void testNoOrcidQueueRecordCreationOccursIfPublicationSynchronizationIsDisabled() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .build(); + + Collection publicationCollection = createCollection("Publications", "Publication"); + + Item publication = ItemBuilder.createItem(context, publicationCollection) + .withTitle("Test publication") + .withAuthor("Test User") + .build(); + + EntityType publicationType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); + EntityType personType = EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + + RelationshipType isAuthorOfPublication = createRelationshipTypeBuilder(context, personType, publicationType, + "isAuthorOfPublication", "isPublicationOfAuthor", 0, null, 0, null).build(); + + RelationshipBuilder.createRelationshipBuilder(context, profile, publication, isAuthorOfPublication).build(); + + context.restoreAuthSystemState(); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + + addMetadata(profile, "dspace", "orcid", "sync-publications", DISABLED.name(), null); + addMetadata(publication, "dc", "date", "issued", "2021-01-01", null); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + } + + @Test + public void testOrcidQueueRecordCreationToUpdateProject() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + 
.withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withOrcidSynchronizationFundingsPreference(ALL) + .build(); + + Collection projectCollection = createCollection("Projects", "Project"); + + Item project = ItemBuilder.createItem(context, projectCollection) + .withTitle("Test project") + .build(); + + createOrcidHistory(context, profile, project) + .withPutCode("123456") + .build(); + + EntityType projectType = EntityTypeBuilder.createEntityTypeBuilder(context, "Project").build(); + EntityType personType = EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + + RelationshipType isProjectOfPerson = createRelationshipTypeBuilder(context, projectType, personType, + "isProjectOfPerson", "isPersonOfProject", 0, null, 0, null).build(); + + RelationshipBuilder.createRelationshipBuilder(context, project, profile, isProjectOfPerson).build(); + + context.restoreAuthSystemState(); + context.commit(); + + List orcidQueueRecords = orcidQueueService.findAll(context); + assertThat(orcidQueueRecords, hasSize(1)); + assertThat(orcidQueueRecords.get(0), matches(profile, project, "Project", "123456", UPDATE)); + } + + @Test + public void testNoOrcidQueueRecordCreationOccursForNotConfiguredEntities() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .build(); + + Collection projectCollection = createCollection("Projects", "Project"); + + Item project = ItemBuilder.createItem(context, projectCollection) + .withTitle("Test project") + .withProjectInvestigator("Test User") + .build(); + + EntityType projectType = EntityTypeBuilder.createEntityTypeBuilder(context, "Project").build(); + EntityType personType = EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + 
+ RelationshipType isProjectOfPerson = createRelationshipTypeBuilder(context, projectType, personType, + "isProjectOfPerson", "isPersonOfProject", 0, null, 0, null).build(); + + RelationshipBuilder.createRelationshipBuilder(context, project, profile, isProjectOfPerson).build(); + + context.restoreAuthSystemState(); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + } + + @Test + public void testOrcidQueueRecalculationOnProfilePreferenceUpdate() throws Exception { + // Set a fake handle prefix for this test which we will use to assign handles below + configurationService.setProperty("handle.prefix", "fake-handle"); + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-0000-0012-2345") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withSubject("Math") + .withHandle("fake-handle/200") + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + List records = orcidQueueService.findAll(context); + assertThat(records, hasSize(1)); + assertThat(records, hasItem(matches(profile, "KEYWORDS", null, "dc.subject::Math", "Math", INSERT))); + + addMetadata(profile, "person", "identifier", "rid", "ID", null); + addMetadata(profile, "dspace", "orcid", "sync-profile", IDENTIFIERS.name(), null); + + context.commit(); + + records = orcidQueueService.findAll(context); + assertThat(records, hasSize(3)); + assertThat(records, hasItem(matches(profile, "KEYWORDS", null, "dc.subject::Math", "Math", INSERT))); + assertThat(records, hasItem(matches(profile, "EXTERNAL_IDS", null, "person.identifier.rid::ID", "ID", INSERT))); + assertThat(records, hasItem(matches(profile, "RESEARCHER_URLS", null, + "dc.identifier.uri::http://localhost:4000/handle/fake-handle/200", + "http://localhost:4000/handle/fake-handle/200", INSERT))); + + removeMetadata(profile, 
"dspace", "orcid", "sync-profile"); + + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + + } + + @Test + public void testWithManyInsertionAndDeletionOfSameMetadataValue() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .withSubject("Science") + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + List queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords.get(0), matches(profile, "KEYWORDS", null, + "dc.subject::Science", "Science", INSERT)); + + OrcidHistoryBuilder.createOrcidHistory(context, profile, profile) + .withRecordType(KEYWORDS.name()) + .withDescription("Science") + .withMetadata("dc.subject::Science") + .withOperation(OrcidOperation.INSERT) + .withPutCode("12345") + .withStatus(201) + .build(); + + removeMetadata(profile, "dc", "subject", null); + + context.commit(); + + queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords.get(0), matches(profile, "KEYWORDS", "12345", + "dc.subject::Science", "Science", DELETE)); + + OrcidHistoryBuilder.createOrcidHistory(context, profile, profile) + .withRecordType(KEYWORDS.name()) + .withDescription("Science") + .withMetadata("dc.subject::Science") + .withOperation(OrcidOperation.DELETE) + .withStatus(204) + .build(); + + addMetadata(profile, "dc", "subject", null, "Science", null); + + context.commit(); + + queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords.get(0), matches(profile, "KEYWORDS", null, + "dc.subject::Science", "Science", INSERT)); + + OrcidHistoryBuilder.createOrcidHistory(context, 
profile, profile) + .withRecordType(KEYWORDS.name()) + .withDescription("Science") + .withMetadata("dc.subject::Science") + .withOperation(OrcidOperation.INSERT) + .withPutCode("12346") + .withStatus(201) + .build(); + + removeMetadata(profile, "dc", "subject", null); + + context.commit(); + + queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords.get(0), matches(profile, "KEYWORDS", "12346", + "dc.subject::Science", "Science", DELETE)); + + } + + private void addMetadata(Item item, String schema, String element, String qualifier, String value, + String authority) throws Exception { + context.turnOffAuthorisationSystem(); + item = context.reloadEntity(item); + itemService.addMetadata(context, item, schema, element, qualifier, null, value, authority, 600); + itemService.update(context, item); + context.restoreAuthSystemState(); + } + + private void removeMetadata(Item item, String schema, String element, String qualifier) throws Exception { + context.turnOffAuthorisationSystem(); + item = context.reloadEntity(item); + List metadata = itemService.getMetadata(item, schema, element, qualifier, Item.ANY); + itemService.removeMetadataValues(context, item, metadata); + itemService.update(context, item); + context.restoreAuthSystemState(); + } + + private Collection createCollection(String name, String entityType) { + return CollectionBuilder.createCollection(context, parentCommunity) + .withName(name) + .withEntityType(entityType) + .build(); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/orcid/model/validator/OrcidValidatorTest.java b/dspace-api/src/test/java/org/dspace/orcid/model/validator/OrcidValidatorTest.java new file mode 100644 index 0000000000..20cad9ce2c --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/orcid/model/validator/OrcidValidatorTest.java @@ -0,0 +1,662 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE 
files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.validator; + +import static org.dspace.orcid.model.validator.OrcidValidationError.AMOUNT_CURRENCY_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.DISAMBIGUATED_ORGANIZATION_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.DISAMBIGUATED_ORGANIZATION_VALUE_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.DISAMBIGUATION_SOURCE_INVALID; +import static org.dspace.orcid.model.validator.OrcidValidationError.DISAMBIGUATION_SOURCE_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.EXTERNAL_ID_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.FUNDER_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.ORGANIZATION_ADDRESS_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.ORGANIZATION_CITY_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.ORGANIZATION_COUNTRY_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.ORGANIZATION_NAME_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.PUBLICATION_DATE_INVALID; +import static org.dspace.orcid.model.validator.OrcidValidationError.TITLE_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.TYPE_REQUIRED; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; +import static org.mockito.Mockito.when; + +import java.util.List; + +import org.dspace.orcid.model.validator.impl.OrcidValidatorImpl; +import org.dspace.services.ConfigurationService; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import 
org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; +import org.orcid.jaxb.model.common.Iso3166Country; +import org.orcid.jaxb.model.common.Relationship; +import org.orcid.jaxb.model.common.WorkType; +import org.orcid.jaxb.model.v3.release.common.Amount; +import org.orcid.jaxb.model.v3.release.common.DisambiguatedOrganization; +import org.orcid.jaxb.model.v3.release.common.Organization; +import org.orcid.jaxb.model.v3.release.common.OrganizationAddress; +import org.orcid.jaxb.model.v3.release.common.PublicationDate; +import org.orcid.jaxb.model.v3.release.common.Title; +import org.orcid.jaxb.model.v3.release.common.Year; +import org.orcid.jaxb.model.v3.release.record.ExternalID; +import org.orcid.jaxb.model.v3.release.record.ExternalIDs; +import org.orcid.jaxb.model.v3.release.record.Funding; +import org.orcid.jaxb.model.v3.release.record.FundingTitle; +import org.orcid.jaxb.model.v3.release.record.Work; +import org.orcid.jaxb.model.v3.release.record.WorkTitle; + +/** + * Unit tests for {@link OrcidValidatorImpl} + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@RunWith(MockitoJUnitRunner.class) +public class OrcidValidatorTest { + + @Mock(lenient = true) + private ConfigurationService configurationService; + + @InjectMocks + private OrcidValidatorImpl validator; + + @Before + public void before() { + when(configurationService.getBooleanProperty("orcid.validation.work.enabled", true)).thenReturn(true); + when(configurationService.getBooleanProperty("orcid.validation.funding.enabled", true)).thenReturn(true); + when(configurationService.getArrayProperty("orcid.validation.organization.identifier-sources")) + .thenReturn(new String[] { "RINGGOLD", "GRID", "FUNDREF", "LEI" }); + } + + @Test + public void testWorkWithoutTitleAndTypeAndExternalIds() { + + List errors = validator.validateWork(new Work()); + assertThat(errors, hasSize(3)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED, 
TYPE_REQUIRED, EXTERNAL_ID_REQUIRED)); + } + + @Test + public void testWorkWithoutWorkTitle() { + + Work work = new Work(); + work.setWorkType(WorkType.DATA_SET); + work.setWorkExternalIdentifiers(new ExternalIDs()); + work.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + List errors = validator.validateWork(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED)); + } + + @Test + public void testWorkWithoutTitle() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.setWorkType(WorkType.DATA_SET); + work.setWorkExternalIdentifiers(new ExternalIDs()); + work.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + List errors = validator.validateWork(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED)); + } + + @Test + public void testWorkWithNullTitle() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.getWorkTitle().setTitle(new Title(null)); + work.setWorkType(WorkType.DATA_SET); + work.setWorkExternalIdentifiers(new ExternalIDs()); + work.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + List errors = validator.validateWork(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED)); + } + + @Test + public void testWorkWithEmptyTitle() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.getWorkTitle().setTitle(new Title("")); + work.setWorkType(WorkType.DATA_SET); + work.setWorkExternalIdentifiers(new ExternalIDs()); + work.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + List errors = validator.validateWork(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED)); + } + + @Test + public void testWorkWithoutType() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + 
work.getWorkTitle().setTitle(new Title("Work title")); + work.setWorkExternalIdentifiers(new ExternalIDs()); + work.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + List errors = validator.validateWork(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(TYPE_REQUIRED)); + } + + @Test + public void testWorkWithoutExternalIds() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.getWorkTitle().setTitle(new Title("Work title")); + work.setWorkType(WorkType.DATA_SET); + + List errors = validator.validateWork(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(EXTERNAL_ID_REQUIRED)); + } + + @Test + public void testWorkWithEmptyExternalIds() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.getWorkTitle().setTitle(new Title("Work title")); + work.setWorkType(WorkType.DATA_SET); + work.setWorkExternalIdentifiers(new ExternalIDs()); + + List errors = validator.validateWork(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(EXTERNAL_ID_REQUIRED)); + } + + @Test + public void testdWorkWithPublicationDateWithoutYear() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.setWorkType(WorkType.DATA_SET); + work.getWorkTitle().setTitle(new Title("Work title")); + work.setWorkExternalIdentifiers(new ExternalIDs()); + work.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + PublicationDate publicationDate = new PublicationDate(); + work.setPublicationDate(publicationDate); + + List errors = validator.validateWork(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(PUBLICATION_DATE_INVALID)); + } + + @Test + public void testdWorkWithPublicationDateWithInvalidYear() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.setWorkType(WorkType.DATA_SET); + work.getWorkTitle().setTitle(new Title("Work title")); + 
work.setWorkExternalIdentifiers(new ExternalIDs()); + work.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + PublicationDate publicationDate = new PublicationDate(); + Year year = new Year(); + year.setValue("INVALID"); + publicationDate.setYear(year); + work.setPublicationDate(publicationDate); + + List errors = validator.validateWork(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(PUBLICATION_DATE_INVALID)); + } + + @Test + public void testdWorkWithPublicationDateWithYearPriorTo1900() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.setWorkType(WorkType.DATA_SET); + work.getWorkTitle().setTitle(new Title("Work title")); + work.setWorkExternalIdentifiers(new ExternalIDs()); + work.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + PublicationDate publicationDate = new PublicationDate(); + publicationDate.setYear(new Year(1850)); + work.setPublicationDate(publicationDate); + + List errors = validator.validateWork(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(PUBLICATION_DATE_INVALID)); + } + + @Test + public void testValidWork() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.setWorkType(WorkType.DATA_SET); + work.getWorkTitle().setTitle(new Title("Work title")); + work.setWorkExternalIdentifiers(new ExternalIDs()); + work.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + PublicationDate publicationDate = new PublicationDate(); + publicationDate.setYear(new Year(1956)); + work.setPublicationDate(publicationDate); + + List errors = validator.validateWork(work); + assertThat(errors, empty()); + } + + @Test + public void testFundingWithoutTitleAndExternalIdsAndOrganization() { + + List errors = validator.validateFunding(new Funding()); + assertThat(errors, hasSize(3)); + assertThat(errors, containsInAnyOrder(EXTERNAL_ID_REQUIRED, FUNDER_REQUIRED, 
TITLE_REQUIRED)); + } + + @Test + public void testFundingWithoutExternalIdsAndOrganization() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Funding title")); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(2)); + assertThat(errors, containsInAnyOrder(EXTERNAL_ID_REQUIRED, FUNDER_REQUIRED)); + } + + @Test + public void testFundingWithoutTitleAndOrganization() { + + Funding funding = new Funding(); + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(2)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED, FUNDER_REQUIRED)); + } + + @Test + public void testFundingWithoutTitleAndExternalIds() { + + Funding funding = new Funding(); + funding.setOrganization(buildValidOrganization()); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(2)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED, EXTERNAL_ID_REQUIRED)); + } + + @Test + public void testFundingWithoutTitle() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + funding.setOrganization(buildValidOrganization()); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED)); + } + + @Test + public void testFundingWithNullTitle() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title(null)); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + funding.setOrganization(buildValidOrganization()); + 
+ List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED)); + } + + @Test + public void testFundingWithEmptyTitle() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + funding.setOrganization(buildValidOrganization()); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED)); + } + + @Test + public void testFundingWithEmptyExternalIds() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + + funding.setOrganization(buildValidOrganization()); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(EXTERNAL_ID_REQUIRED)); + } + + @Test + public void testFundingWithOrganizationWithoutName() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + Organization organization = buildValidOrganization(); + organization.setName(null); + funding.setOrganization(organization); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(ORGANIZATION_NAME_REQUIRED)); + } + + @Test + public void testFundingWithOrganizationWithEmptyName() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new 
ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + Organization organization = buildValidOrganization(); + organization.setName(""); + funding.setOrganization(organization); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(ORGANIZATION_NAME_REQUIRED)); + } + + @Test + public void testFundingWithOrganizationWithoutAddress() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + Organization organization = buildValidOrganization(); + organization.setAddress(null); + funding.setOrganization(organization); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(ORGANIZATION_ADDRESS_REQUIRED)); + } + + @Test + public void testFundingWithOrganizationWithoutCity() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + Organization organization = buildValidOrganization(); + organization.getAddress().setCity(null); + funding.setOrganization(organization); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(ORGANIZATION_CITY_REQUIRED)); + } + + @Test + public void testFundingWithOrganizationWithoutCountry() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + 
funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + Organization organization = buildValidOrganization(); + organization.getAddress().setCountry(null); + funding.setOrganization(organization); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(ORGANIZATION_COUNTRY_REQUIRED)); + } + + @Test + public void testFundingWithOrganizationWithoutDisambiguatedOrganization() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + Organization organization = buildValidOrganization(); + organization.setDisambiguatedOrganization(null); + funding.setOrganization(organization); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(DISAMBIGUATED_ORGANIZATION_REQUIRED)); + } + + @Test + public void testFundingWithOrganizationWithoutDisambiguatedOrganizationId() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + Organization organization = buildValidOrganization(); + organization.getDisambiguatedOrganization().setDisambiguatedOrganizationIdentifier(null); + funding.setOrganization(organization); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(DISAMBIGUATED_ORGANIZATION_VALUE_REQUIRED)); + } + + @Test + public void testFundingWithOrganizationWithoutDisambiguatedOrganizationSource() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + 
funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + Organization organization = buildValidOrganization(); + organization.getDisambiguatedOrganization().setDisambiguationSource(null); + funding.setOrganization(organization); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(DISAMBIGUATION_SOURCE_REQUIRED)); + } + + @Test + public void testFundingWithOrganizationWithInvalidDisambiguationSource() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + Organization organization = buildValidOrganization(); + organization.getDisambiguatedOrganization().setDisambiguationSource("INVALID"); + funding.setOrganization(organization); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(DISAMBIGUATION_SOURCE_INVALID)); + } + + @Test + public void testFundingWithoutAmountCurrency() { + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + funding.setOrganization(buildValidOrganization()); + + funding.setAmount(new Amount()); + funding.getAmount().setContent("20000"); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(AMOUNT_CURRENCY_REQUIRED)); + } + + @Test + public void testValidFunding() { + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + 
funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + funding.setOrganization(buildValidOrganization()); + + List errors = validator.validateFunding(funding); + assertThat(errors, empty()); + } + + @Test + public void testWithWorkValidationEnabled() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.getWorkTitle().setTitle(new Title("Work title")); + work.setWorkExternalIdentifiers(new ExternalIDs()); + work.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + List errors = validator.validate(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(TYPE_REQUIRED)); + } + + @Test + public void testWithWorkValidationDisabled() { + + when(configurationService.getBooleanProperty("orcid.validation.work.enabled", true)).thenReturn(false); + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.getWorkTitle().setTitle(new Title("Work title")); + + List errors = validator.validate(work); + assertThat(errors, empty()); + } + + @Test + public void testWithFundingValidationEnabled() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + funding.setOrganization(buildValidOrganization()); + + List errors = validator.validate(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED)); + } + + @Test + public void testWithFundingValidationDisabled() { + + when(configurationService.getBooleanProperty("orcid.validation.funding.enabled", true)).thenReturn(false); + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("")); + + 
funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + funding.setOrganization(buildValidOrganization()); + + List errors = validator.validate(funding); + assertThat(errors, empty()); + } + + private ExternalID buildValidExternalID() { + ExternalID externalID = new ExternalID(); + externalID.setRelationship(Relationship.SELF); + externalID.setType("TYPE"); + externalID.setValue("VALUE"); + return externalID; + } + + private Organization buildValidOrganization() { + Organization organization = new Organization(); + organization.setName("Organization"); + + OrganizationAddress address = new OrganizationAddress(); + address.setCity("City"); + address.setCountry(Iso3166Country.BA); + organization.setAddress(address); + + DisambiguatedOrganization disambiguatedOrganization = new DisambiguatedOrganization(); + disambiguatedOrganization.setDisambiguatedOrganizationIdentifier("ID"); + disambiguatedOrganization.setDisambiguationSource("LEI"); + organization.setDisambiguatedOrganization(disambiguatedOrganization); + + return organization; + } + +} diff --git a/dspace-api/src/test/java/org/dspace/orcid/script/OrcidBulkPushIT.java b/dspace-api/src/test/java/org/dspace/orcid/script/OrcidBulkPushIT.java new file mode 100644 index 0000000000..db66f6c7aa --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/orcid/script/OrcidBulkPushIT.java @@ -0,0 +1,500 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.script; + +import static org.dspace.app.launcher.ScriptLauncher.handleScript; +import static org.dspace.app.matcher.LambdaMatcher.matches; +import static org.dspace.app.matcher.OrcidQueueMatcher.matches; +import static org.dspace.builder.OrcidQueueBuilder.createOrcidQueue; +import static 
org.dspace.orcid.OrcidOperation.DELETE; +import static org.dspace.orcid.OrcidOperation.INSERT; +import static org.dspace.orcid.OrcidOperation.UPDATE; +import static org.dspace.profile.OrcidSynchronizationMode.BATCH; +import static org.dspace.profile.OrcidSynchronizationMode.MANUAL; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +import java.sql.SQLException; +import java.util.List; +import java.util.function.Predicate; + +import org.apache.commons.lang3.ArrayUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.launcher.ScriptLauncher; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.OrcidTokenBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.eperson.EPerson; +import org.dspace.orcid.OrcidHistory; +import org.dspace.orcid.OrcidOperation; +import org.dspace.orcid.OrcidQueue; +import org.dspace.orcid.client.OrcidClient; +import org.dspace.orcid.client.OrcidResponse; +import org.dspace.orcid.exception.OrcidClientException; +import org.dspace.orcid.factory.OrcidServiceFactory; +import org.dspace.orcid.service.OrcidQueueService; +import org.dspace.orcid.service.impl.OrcidHistoryServiceImpl; +import 
org.dspace.profile.OrcidSynchronizationMode; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +/** + * Integration tests for {@link OrcidBulkPush}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidBulkPushIT extends AbstractIntegrationTestWithDatabase { + + private Collection profileCollection; + + private Collection publicationCollection; + + private OrcidHistoryServiceImpl orcidHistoryService; + + private OrcidQueueService orcidQueueService; + + private ConfigurationService configurationService; + + private OrcidClient orcidClient; + + private OrcidClient orcidClientMock; + + @Before + public void setup() { + + orcidHistoryService = (OrcidHistoryServiceImpl) OrcidServiceFactory.getInstance().getOrcidHistoryService(); + orcidQueueService = OrcidServiceFactory.getInstance().getOrcidQueueService(); + + configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + + context.setCurrentUser(admin); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent community") + .build(); + + profileCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Profiles") + .withEntityType("Person") + .build(); + + publicationCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Publications") + .withEntityType("Publication") + .build(); + + orcidClientMock = mock(OrcidClient.class); + + orcidClient = orcidHistoryService.getOrcidClient(); + orcidHistoryService.setOrcidClient(orcidClientMock); + + } + + @After + public void after() throws SQLException { + List records = orcidHistoryService.findAll(context); + for (OrcidHistory record : records) { + orcidHistoryService.delete(context, record); + } + orcidHistoryService.setOrcidClient(orcidClient); + } + + @Test + public void 
testWithoutOrcidQueueRecords() throws Exception { + TestDSpaceRunnableHandler handler = runBulkSynchronization(false); + assertThat(handler.getInfoMessages(), hasSize(1)); + assertThat(handler.getInfoMessages().get(0), is("Found 0 queue records to synchronize with ORCID")); + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), empty()); + } + + @Test + public void testWithManyOrcidQueueRecords() throws Exception { + + context.turnOffAuthorisationSystem(); + + EPerson owner = EPersonBuilder.createEPerson(context) + .withEmail("owner@test.it") + .build(); + context.restoreAuthSystemState(); + + Item firstProfileItem = createProfileItemItem("0000-1111-2222-3333", eperson, BATCH); + Item secondProfileItem = createProfileItemItem("1111-2222-3333-4444", admin, MANUAL); + Item thirdProfileItem = createProfileItemItem("2222-3333-4444-5555", owner, BATCH); + + Item firstEntity = createPublication("First publication"); + Item secondEntity = createPublication("Second publication"); + Item thirdEntity = createPublication("Third publication"); + Item fourthEntity = createPublication("Fourth publication"); + Item fifthEntity = createPublication("Fifth publication"); + + when(orcidClientMock.push(any(), eq("0000-1111-2222-3333"), any())) + .thenReturn(createdResponse("12345")); + + when(orcidClientMock.update(any(), eq("0000-1111-2222-3333"), any(), eq("98765"))) + .thenReturn(updatedResponse("98765")); + + when(orcidClientMock.deleteByPutCode(any(), eq("0000-1111-2222-3333"), eq("22222"), eq("/work"))) + .thenReturn(deletedResponse()); + + when(orcidClientMock.push(any(), eq("2222-3333-4444-5555"), any())) + .thenReturn(createdResponse("11111")); + + createOrcidQueue(context, firstProfileItem, firstEntity); + createOrcidQueue(context, firstProfileItem, secondEntity, "98765"); + createOrcidQueue(context, firstProfileItem, "Description", "Publication", "22222"); + createOrcidQueue(context, secondProfileItem, thirdEntity); + 
createOrcidQueue(context, secondProfileItem, fourthEntity); + createOrcidQueue(context, thirdProfileItem, fifthEntity); + + context.commit(); + + TestDSpaceRunnableHandler handler = runBulkSynchronization(false); + + String firstProfileItemId = firstProfileItem.getID().toString(); + String thirdProfileItemId = thirdProfileItem.getID().toString(); + + assertThat(handler.getInfoMessages(), hasSize(9)); + assertThat(handler.getInfoMessages(), containsInAnyOrder( + "Found 4 queue records to synchronize with ORCID", + "Addition of Publication for profile with ID: " + firstProfileItemId, + "History record created with status 201. The operation was completed successfully", + "Update of Publication for profile with ID: " + firstProfileItemId + " by put code 98765", + "History record created with status 200. The operation was completed successfully", + "Deletion of Publication for profile with ID: " + firstProfileItemId + " by put code 22222", + "History record created with status 204. The operation was completed successfully", + "Addition of Publication for profile with ID: " + thirdProfileItemId, + "History record created with status 201. 
The operation was completed successfully")); + + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), empty()); + + verify(orcidClientMock).push(any(), eq("0000-1111-2222-3333"), any()); + verify(orcidClientMock).push(any(), eq("2222-3333-4444-5555"), any()); + verify(orcidClientMock).update(any(), eq("0000-1111-2222-3333"), any(), eq("98765")); + verify(orcidClientMock).deleteByPutCode(any(), eq("0000-1111-2222-3333"), eq("22222"), eq("/work")); + + verifyNoMoreInteractions(orcidClientMock); + + List queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(2)); + assertThat(queueRecords, hasItem(matches(secondProfileItem, thirdEntity, "Publication", INSERT, 0))); + assertThat(queueRecords, hasItem(matches(secondProfileItem, fourthEntity, "Publication", INSERT, 0))); + + List historyRecords = orcidHistoryService.findAll(context); + assertThat(historyRecords, hasSize(4)); + assertThat(historyRecords, hasItem(matches(history(firstProfileItem, firstEntity, 201, INSERT)))); + assertThat(historyRecords, hasItem(matches(history(firstProfileItem, secondEntity, 200, UPDATE)))); + assertThat(historyRecords, hasItem(matches(history(firstProfileItem, 204, DELETE)))); + assertThat(historyRecords, hasItem(matches(history(thirdProfileItem, fifthEntity, 201, INSERT)))); + + } + + @Test + public void testWithOneValidationError() throws Exception { + + Item firstProfileItem = createProfileItemItem("0000-1111-2222-3333", eperson, BATCH); + Item secondProfileItem = createProfileItemItem("1111-2222-3333-4444", admin, BATCH); + + Item firstEntity = createPublication("First publication"); + Item secondEntity = createPublication(""); + Item thirdEntity = createPublication("Third publication"); + + when(orcidClientMock.push(any(), eq("0000-1111-2222-3333"), any())) + .thenReturn(createdResponse("12345")); + + when(orcidClientMock.push(any(), eq("1111-2222-3333-4444"), any())) + .thenReturn(createdResponse("55555")); + + 
createOrcidQueue(context, firstProfileItem, firstEntity); + createOrcidQueue(context, firstProfileItem, secondEntity, "98765"); + createOrcidQueue(context, secondProfileItem, thirdEntity); + + context.commit(); + + TestDSpaceRunnableHandler handler = runBulkSynchronization(false); + + assertThat(handler.getInfoMessages(), hasSize(6)); + assertThat(handler.getInfoMessages(), containsInAnyOrder( + "Found 3 queue records to synchronize with ORCID", + "Addition of Publication for profile with ID: " + firstProfileItem.getID().toString(), + "History record created with status 201. The operation was completed successfully", + "Update of Publication for profile with ID: " + firstProfileItem.getID().toString() + " by put code 98765", + "Addition of Publication for profile with ID: " + secondProfileItem.getID().toString(), + "History record created with status 201. The operation was completed successfully")); + + assertThat(handler.getErrorMessages(), hasSize(1)); + assertThat(handler.getErrorMessages(), containsInAnyOrder( + "Errors occurs during ORCID object validation. 
Error codes: title.required")); + + assertThat(handler.getWarningMessages(), empty()); + + verify(orcidClientMock).push(any(), eq("0000-1111-2222-3333"), any()); + verify(orcidClientMock).push(any(), eq("1111-2222-3333-4444"), any()); + verifyNoMoreInteractions(orcidClientMock); + + List queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords, hasItem(matches(firstProfileItem, secondEntity, "Publication", UPDATE, 1))); + + List historyRecords = orcidHistoryService.findAll(context); + assertThat(historyRecords, hasSize(2)); + assertThat(historyRecords, hasItem(matches(history(firstProfileItem, firstEntity, 201, INSERT)))); + assertThat(historyRecords, hasItem(matches(history(secondProfileItem, thirdEntity, 201, INSERT)))); + + } + + @Test + public void testWithUnexpectedErrorForMissingOrcid() throws Exception { + + Item firstProfileItem = createProfileItemItem("0000-1111-2222-3333", eperson, BATCH); + Item secondProfileItem = createProfileItemItem("", admin, BATCH); + + Item firstEntity = createPublication("First publication"); + Item secondEntity = createPublication("Second publication"); + + when(orcidClientMock.push(any(), eq("0000-1111-2222-3333"), any())) + .thenReturn(createdResponse("12345")); + + createOrcidQueue(context, secondProfileItem, secondEntity); + createOrcidQueue(context, firstProfileItem, firstEntity); + + context.commit(); + + TestDSpaceRunnableHandler handler = runBulkSynchronization(false); + + assertThat(handler.getInfoMessages(), hasSize(4)); + assertThat(handler.getInfoMessages(), containsInAnyOrder( + "Found 2 queue records to synchronize with ORCID", + "Addition of Publication for profile with ID: " + secondProfileItem.getID().toString(), + "Addition of Publication for profile with ID: " + firstProfileItem.getID().toString(), + "History record created with status 201. 
The operation was completed successfully")); + + assertThat(handler.getErrorMessages(), hasSize(1)); + assertThat(handler.getErrorMessages(), contains("An unexpected error occurs during the synchronization: " + + "The related profileItem item (id = " + secondProfileItem.getID() + ") does not have an orcid")); + + assertThat(handler.getWarningMessages(), empty()); + + verify(orcidClientMock).push(any(), eq("0000-1111-2222-3333"), any()); + verifyNoMoreInteractions(orcidClientMock); + + List queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords, hasItem(matches(secondProfileItem, secondEntity, "Publication", INSERT, 1))); + + List historyRecords = orcidHistoryService.findAll(context); + assertThat(historyRecords, hasSize(1)); + assertThat(historyRecords, hasItem(matches(history(firstProfileItem, firstEntity, 201, INSERT)))); + + } + + @Test + public void testWithOrcidClientException() throws Exception { + + Item firstProfileItem = createProfileItemItem("0000-1111-2222-3333", eperson, BATCH); + Item secondProfileItem = createProfileItemItem("1111-2222-3333-4444", admin, BATCH); + + Item firstEntity = createPublication("First publication"); + Item secondEntity = createPublication("Second publication"); + + when(orcidClientMock.push(any(), eq("0000-1111-2222-3333"), any())) + .thenThrow(new OrcidClientException(400, "Bad request")); + + when(orcidClientMock.push(any(), eq("1111-2222-3333-4444"), any())) + .thenReturn(createdResponse("55555")); + + createOrcidQueue(context, firstProfileItem, firstEntity); + createOrcidQueue(context, secondProfileItem, secondEntity); + + context.commit(); + + TestDSpaceRunnableHandler handler = runBulkSynchronization(false); + + assertThat(handler.getInfoMessages(), hasSize(5)); + assertThat(handler.getInfoMessages(), containsInAnyOrder( + "Found 2 queue records to synchronize with ORCID", + "Addition of Publication for profile with ID: " + firstProfileItem.getID().toString(), 
+ "History record created with status 400. The resource sent to ORCID registry is not valid", + "Addition of Publication for profile with ID: " + secondProfileItem.getID().toString(), + "History record created with status 201. The operation was completed successfully")); + + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), empty()); + + verify(orcidClientMock).push(any(), eq("0000-1111-2222-3333"), any()); + verify(orcidClientMock).push(any(), eq("1111-2222-3333-4444"), any()); + verifyNoMoreInteractions(orcidClientMock); + + List queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords, hasItem(matches(firstProfileItem, firstEntity, "Publication", INSERT, 1))); + + List historyRecords = orcidHistoryService.findAll(context); + assertThat(historyRecords, hasSize(2)); + assertThat(historyRecords, hasItem(matches(history(firstProfileItem, firstEntity, 400, INSERT)))); + assertThat(historyRecords, hasItem(matches(history(secondProfileItem, secondEntity, 201, INSERT)))); + + } + + @Test + @SuppressWarnings("unchecked") + public void testWithTooManyAttempts() throws Exception { + + configurationService.setProperty("orcid.bulk-synchronization.max-attempts", 2); + + Item profileItem = createProfileItemItem("0000-1111-2222-3333", eperson, BATCH); + Item entity = createPublication("First publication"); + + when(orcidClientMock.push(any(), eq("0000-1111-2222-3333"), any())) + .thenThrow(new OrcidClientException(400, "Bad request")); + + createOrcidQueue(context, profileItem, entity); + + // First attempt + + TestDSpaceRunnableHandler handler = runBulkSynchronization(false); + assertThat(handler.getInfoMessages(), hasItem("Found 1 queue records to synchronize with ORCID")); + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), empty()); + + List queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + 
assertThat(queueRecords, hasItem(matches(profileItem, entity, "Publication", INSERT, 1))); + + List historyRecords = orcidHistoryService.findAll(context); + assertThat(historyRecords, hasSize(1)); + assertThat(historyRecords, hasItem(matches(history(profileItem, entity, 400, INSERT)))); + + // Second attempt + + handler = runBulkSynchronization(false); + assertThat(handler.getInfoMessages(), hasItem("Found 1 queue records to synchronize with ORCID")); + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), empty()); + + queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords, hasItem(matches(profileItem, entity, "Publication", INSERT, 2))); + + historyRecords = orcidHistoryService.findAll(context); + assertThat(historyRecords, hasSize(2)); + assertThat(historyRecords, contains(matches(history(profileItem, entity, 400, INSERT)), + matches(history(profileItem, entity, 400, INSERT)))); + + // Third attempt + + handler = runBulkSynchronization(false); + assertThat(handler.getInfoMessages(), hasItem("Found 0 queue records to synchronize with ORCID")); + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), empty()); + + queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords, hasItem(matches(profileItem, entity, "Publication", INSERT, 2))); + + historyRecords = orcidHistoryService.findAll(context); + assertThat(historyRecords, hasSize(2)); + assertThat(historyRecords, contains(matches(history(profileItem, entity, 400, INSERT)), + matches(history(profileItem, entity, 400, INSERT)))); + + // Fourth attempt forcing synchronization + + handler = runBulkSynchronization(true); + assertThat(handler.getInfoMessages(), hasItem("Found 1 queue records to synchronize with ORCID")); + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), empty()); + + 
queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords, hasItem(matches(profileItem, entity, "Publication", INSERT, 3))); + + historyRecords = orcidHistoryService.findAll(context); + assertThat(historyRecords, hasSize(3)); + assertThat(historyRecords, contains(matches(history(profileItem, entity, 400, INSERT)), + matches(history(profileItem, entity, 400, INSERT)), + matches(history(profileItem, entity, 400, INSERT)))); + } + + private Predicate history(Item profileItem, Item entity, int status, OrcidOperation operation) { + return history -> profileItem.equals(history.getProfileItem()) + && entity.equals(history.getEntity()) + && history.getStatus().equals(status) + && operation == history.getOperation(); + } + + private Predicate history(Item profileItem, int status, OrcidOperation operation) { + return history -> profileItem.equals(history.getProfileItem()) + && history.getStatus().equals(status) + && operation == history.getOperation(); + } + + private TestDSpaceRunnableHandler runBulkSynchronization(boolean forceSynchronization) throws Exception { + String[] args = new String[] { "orcid-bulk-push" }; + args = forceSynchronization ? 
ArrayUtils.add(args, "-f") : args; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl); + return handler; + } + + private Item createProfileItemItem(String orcid, EPerson owner, OrcidSynchronizationMode mode) + throws Exception { + + Item item = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test user") + .withOrcidIdentifier(orcid) + .withOrcidSynchronizationMode(mode) + .withDspaceObjectOwner(owner.getFullName(), owner.getID().toString()) + .build(); + + OrcidTokenBuilder.create(context, owner, "9c913f57-961e-48af-9223-cfad6562c925") + .withProfileItem(item) + .build(); + + return item; + } + + private Item createPublication(String title) { + return ItemBuilder.createItem(context, publicationCollection) + .withTitle(title) + .withType("Controlled Vocabulary for Resource Type Genres::dataset") + .build(); + } + + private OrcidResponse createdResponse(String putCode) { + return new OrcidResponse(201, putCode, null); + } + + private OrcidResponse updatedResponse(String putCode) { + return new OrcidResponse(200, putCode, null); + } + + private OrcidResponse deletedResponse() { + return new OrcidResponse(204, null, null); + } +} diff --git a/dspace-api/src/test/java/org/dspace/orcid/service/OrcidEntityFactoryServiceIT.java b/dspace-api/src/test/java/org/dspace/orcid/service/OrcidEntityFactoryServiceIT.java new file mode 100644 index 0000000000..17bc6ee531 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/orcid/service/OrcidEntityFactoryServiceIT.java @@ -0,0 +1,296 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service; + +import static org.apache.commons.lang.StringUtils.endsWith; +import static org.dspace.app.matcher.LambdaMatcher.has; +import 
static org.dspace.app.matcher.LambdaMatcher.matches; +import static org.dspace.builder.RelationshipTypeBuilder.createRelationshipTypeBuilder; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.orcid.jaxb.model.common.ContributorRole.AUTHOR; +import static org.orcid.jaxb.model.common.ContributorRole.EDITOR; +import static org.orcid.jaxb.model.common.FundingContributorRole.LEAD; +import static org.orcid.jaxb.model.common.SequenceType.ADDITIONAL; +import static org.orcid.jaxb.model.common.SequenceType.FIRST; + +import java.util.List; +import java.util.function.Predicate; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EntityTypeBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.RelationshipBuilder; +import org.dspace.content.Collection; +import org.dspace.content.EntityType; +import org.dspace.content.Item; +import org.dspace.content.RelationshipType; +import org.dspace.orcid.factory.OrcidServiceFactory; +import org.junit.Before; +import org.junit.Test; +import org.orcid.jaxb.model.common.ContributorRole; +import org.orcid.jaxb.model.common.FundingContributorRole; +import org.orcid.jaxb.model.common.Iso3166Country; +import org.orcid.jaxb.model.common.Relationship; +import org.orcid.jaxb.model.common.SequenceType; +import org.orcid.jaxb.model.common.WorkType; +import org.orcid.jaxb.model.v3.release.common.Contributor; +import org.orcid.jaxb.model.v3.release.common.FuzzyDate; +import org.orcid.jaxb.model.v3.release.common.Organization; +import org.orcid.jaxb.model.v3.release.common.Url; +import 
org.orcid.jaxb.model.v3.release.record.Activity; +import org.orcid.jaxb.model.v3.release.record.ExternalID; +import org.orcid.jaxb.model.v3.release.record.Funding; +import org.orcid.jaxb.model.v3.release.record.FundingContributor; +import org.orcid.jaxb.model.v3.release.record.FundingContributors; +import org.orcid.jaxb.model.v3.release.record.Work; + +/** + * Integration tests for {@link OrcidEntityFactoryService}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidEntityFactoryServiceIT extends AbstractIntegrationTestWithDatabase { + + private OrcidEntityFactoryService entityFactoryService; + + private Collection orgUnits; + + private Collection publications; + + private Collection projects; + + @Before + public void setup() { + + entityFactoryService = OrcidServiceFactory.getInstance().getOrcidEntityFactoryService(); + + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withTitle("Parent community") + .build(); + + orgUnits = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .withEntityType("OrgUnit") + .build(); + + publications = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .withEntityType("Publication") + .build(); + + projects = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .withEntityType("Project") + .build(); + + context.restoreAuthSystemState(); + } + + @Test + public void testWorkCreation() { + + context.turnOffAuthorisationSystem(); + + Item publication = ItemBuilder.createItem(context, publications) + .withTitle("Test publication") + .withAuthor("Walter White") + .withAuthor("Jesse Pinkman") + .withEditor("Editor") + .withIssueDate("2021-04-30") + .withDescriptionAbstract("Publication description") + .withLanguage("en_US") + .withType("Book") + .withIsPartOf("Journal") + .withDoiIdentifier("doi-id") + 
.withScopusIdentifier("scopus-id") + .build(); + + context.restoreAuthSystemState(); + + Activity activity = entityFactoryService.createOrcidObject(context, publication); + assertThat(activity, instanceOf(Work.class)); + + Work work = (Work) activity; + assertThat(work.getJournalTitle(), notNullValue()); + assertThat(work.getJournalTitle().getContent(), is("Journal")); + assertThat(work.getLanguageCode(), is("en")); + assertThat(work.getPublicationDate(), matches(date("2021", "04", "30"))); + assertThat(work.getShortDescription(), is("Publication description")); + assertThat(work.getPutCode(), nullValue()); + assertThat(work.getWorkType(), is(WorkType.BOOK)); + assertThat(work.getWorkTitle(), notNullValue()); + assertThat(work.getWorkTitle().getTitle(), notNullValue()); + assertThat(work.getWorkTitle().getTitle().getContent(), is("Test publication")); + assertThat(work.getWorkContributors(), notNullValue()); + assertThat(work.getUrl(), matches(urlEndsWith(publication.getHandle()))); + + List contributors = work.getWorkContributors().getContributor(); + assertThat(contributors, hasSize(3)); + assertThat(contributors, has(contributor("Walter White", AUTHOR, FIRST))); + assertThat(contributors, has(contributor("Editor", EDITOR, FIRST))); + assertThat(contributors, has(contributor("Jesse Pinkman", AUTHOR, ADDITIONAL))); + + assertThat(work.getExternalIdentifiers(), notNullValue()); + + List externalIds = work.getExternalIdentifiers().getExternalIdentifier(); + assertThat(externalIds, hasSize(3)); + assertThat(externalIds, has(selfExternalId("doi", "doi-id"))); + assertThat(externalIds, has(selfExternalId("eid", "scopus-id"))); + assertThat(externalIds, has(selfExternalId("handle", publication.getHandle()))); + + } + + @Test + public void testEmptyWorkWithUnknownTypeCreation() { + + context.turnOffAuthorisationSystem(); + + Item publication = ItemBuilder.createItem(context, publications) + .withType("TYPE") + .build(); + + context.restoreAuthSystemState(); + + Activity 
activity = entityFactoryService.createOrcidObject(context, publication); + assertThat(activity, instanceOf(Work.class)); + + Work work = (Work) activity; + assertThat(work.getJournalTitle(), nullValue()); + assertThat(work.getLanguageCode(), nullValue()); + assertThat(work.getPublicationDate(), nullValue()); + assertThat(work.getShortDescription(), nullValue()); + assertThat(work.getPutCode(), nullValue()); + assertThat(work.getWorkType(), is(WorkType.OTHER)); + assertThat(work.getWorkTitle(), nullValue()); + assertThat(work.getWorkContributors(), notNullValue()); + assertThat(work.getWorkContributors().getContributor(), empty()); + assertThat(work.getExternalIdentifiers(), notNullValue()); + + List externalIds = work.getExternalIdentifiers().getExternalIdentifier(); + assertThat(externalIds, hasSize(1)); + assertThat(externalIds, has(selfExternalId("handle", publication.getHandle()))); + } + + @Test + public void testFundingCreation() { + context.turnOffAuthorisationSystem(); + + Item orgUnit = ItemBuilder.createItem(context, orgUnits) + .withOrgUnitLegalName("4Science") + .withOrgUnitCountry("IT") + .withOrgUnitLocality("Milan") + .withOrgUnitCrossrefIdentifier("12345") + .build(); + + Item projectItem = ItemBuilder.createItem(context, projects) + .withTitle("Test funding") + .withProjectStartDate("2001-03") + .withProjectEndDate("2010-03-25") + .withProjectInvestigator("Walter White") + .withProjectInvestigator("Jesse Pinkman") + .withProjectAmount("123") + .withProjectAmountCurrency("EUR") + .withOtherIdentifier("888-666-444") + .withIdentifier("000-111-333") + .withDescription("This is a funding to test orcid mapping") + .build(); + + EntityType projectType = EntityTypeBuilder.createEntityTypeBuilder(context, "Project").build(); + EntityType orgUnitType = EntityTypeBuilder.createEntityTypeBuilder(context, "OrgUnit").build(); + + RelationshipType isAuthorOfPublication = createRelationshipTypeBuilder(context, orgUnitType, projectType, + "isOrgUnitOfProject", 
"isProjectOfOrgUnit", 0, null, 0, null).build(); + + RelationshipBuilder.createRelationshipBuilder(context, orgUnit, projectItem, isAuthorOfPublication).build(); + + context.restoreAuthSystemState(); + + Activity activity = entityFactoryService.createOrcidObject(context, projectItem); + assertThat(activity, instanceOf(Funding.class)); + + Funding funding = (Funding) activity; + assertThat(funding.getTitle(), notNullValue()); + assertThat(funding.getTitle().getTitle(), notNullValue()); + assertThat(funding.getTitle().getTitle().getContent(), is("Test funding")); + assertThat(funding.getStartDate(), matches(date("2001", "03", "01"))); + assertThat(funding.getEndDate(), matches(date("2010", "03", "25"))); + assertThat(funding.getDescription(), is("This is a funding to test orcid mapping")); + assertThat(funding.getUrl(), matches(urlEndsWith(projectItem.getHandle()))); + assertThat(funding.getAmount(), notNullValue()); + assertThat(funding.getAmount().getContent(), is("123")); + assertThat(funding.getAmount().getCurrencyCode(), is("EUR")); + + Organization organization = funding.getOrganization(); + assertThat(organization, notNullValue()); + assertThat(organization.getName(), is("4Science")); + assertThat(organization.getAddress(), notNullValue()); + assertThat(organization.getAddress().getCountry(), is(Iso3166Country.IT)); + assertThat(organization.getAddress().getCity(), is("Milan")); + assertThat(organization.getDisambiguatedOrganization(), notNullValue()); + assertThat(organization.getDisambiguatedOrganization().getDisambiguatedOrganizationIdentifier(), is("12345")); + assertThat(organization.getDisambiguatedOrganization().getDisambiguationSource(), is("FUNDREF")); + + FundingContributors fundingContributors = funding.getContributors(); + assertThat(fundingContributors, notNullValue()); + + List contributors = fundingContributors.getContributor(); + assertThat(contributors, hasSize(2)); + assertThat(contributors, has(fundingContributor("Walter White", LEAD))); + 
assertThat(contributors, has(fundingContributor("Jesse Pinkman", LEAD))); + + assertThat(funding.getExternalIdentifiers(), notNullValue()); + + List externalIds = funding.getExternalIdentifiers().getExternalIdentifier(); + assertThat(externalIds, hasSize(2)); + assertThat(externalIds, has(selfExternalId("other-id", "888-666-444"))); + assertThat(externalIds, has(selfExternalId("grant_number", "000-111-333"))); + } + + private Predicate selfExternalId(String type, String value) { + return externalId(type, value, Relationship.SELF); + } + + private Predicate externalId(String type, String value, Relationship relationship) { + return externalId -> externalId.getRelationship() == relationship + && type.equals(externalId.getType()) + && value.equals(externalId.getValue()); + } + + private Predicate contributor(String name, ContributorRole role, SequenceType sequence) { + return contributor -> contributor.getCreditName().getContent().equals(name) + && role.equals(contributor.getContributorAttributes().getContributorRole()) + && contributor.getContributorAttributes().getContributorSequence() == sequence; + } + + private Predicate fundingContributor(String name, FundingContributorRole role) { + return contributor -> contributor.getCreditName().getContent().equals(name) + && role.equals(contributor.getContributorAttributes().getContributorRole()); + } + + private Predicate date(String year, String month, String days) { + return date -> date != null + && year.equals(date.getYear().getValue()) + && month.equals(date.getMonth().getValue()) + && days.equals(date.getDay().getValue()); + } + + private Predicate urlEndsWith(String handle) { + return url -> url != null && url.getValue() != null && endsWith(url.getValue(), handle); + } +} diff --git a/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java b/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java new file mode 100644 index 0000000000..894029f54e 
--- /dev/null +++ b/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java @@ -0,0 +1,244 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service; + +import static org.dspace.app.matcher.LambdaMatcher.matches; +import static org.dspace.orcid.model.OrcidProfileSectionType.COUNTRY; +import static org.dspace.orcid.model.OrcidProfileSectionType.EXTERNAL_IDS; +import static org.dspace.orcid.model.OrcidProfileSectionType.KEYWORDS; +import static org.dspace.orcid.model.OrcidProfileSectionType.OTHER_NAMES; +import static org.dspace.orcid.model.OrcidProfileSectionType.RESEARCHER_URLS; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +import java.util.List; +import java.util.function.Predicate; +import java.util.stream.Collectors; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.ItemService; +import org.dspace.orcid.factory.OrcidServiceFactory; +import org.dspace.orcid.model.OrcidProfileSectionType; +import org.dspace.orcid.model.factory.OrcidProfileSectionFactory; +import org.junit.Before; +import org.junit.Test; +import org.orcid.jaxb.model.common.Iso3166Country; +import 
org.orcid.jaxb.model.common.Relationship; +import org.orcid.jaxb.model.v3.release.record.Address; +import org.orcid.jaxb.model.v3.release.record.Keyword; +import org.orcid.jaxb.model.v3.release.record.OtherName; +import org.orcid.jaxb.model.v3.release.record.PersonExternalIdentifier; +import org.orcid.jaxb.model.v3.release.record.ResearcherUrl; + +/** + * Integration tests for {@link OrcidProfileSectionFactoryService}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidProfileSectionFactoryServiceIT extends AbstractIntegrationTestWithDatabase { + + private OrcidProfileSectionFactoryService profileSectionFactoryService; + + private ItemService itemService; + + private Collection collection; + + @Before + public void setup() { + + profileSectionFactoryService = OrcidServiceFactory.getInstance().getOrcidProfileSectionFactoryService(); + itemService = ContentServiceFactory.getInstance().getItemService(); + + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withTitle("Parent community") + .build(); + + collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .withEntityType("Person") + .build(); + + context.restoreAuthSystemState(); + } + + @Test + public void testAddressCreation() { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test profile") + .withPersonCountry("IT") + .build(); + context.restoreAuthSystemState(); + + List values = List.of(getMetadata(item, "person.country", 0)); + + Object orcidObject = profileSectionFactoryService.createOrcidObject(context, values, COUNTRY); + assertThat(orcidObject, instanceOf(Address.class)); + Address address = (Address) orcidObject; + assertThat(address.getCountry(), notNullValue()); + assertThat(address.getCountry().getValue(), is(Iso3166Country.IT)); + + } + + @Test + public void 
testAddressMetadataSignatureGeneration() { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test profile") + .withPersonCountry("IT") + .build(); + context.restoreAuthSystemState(); + + OrcidProfileSectionFactory countryFactory = getFactory(item, COUNTRY); + + List signatures = countryFactory.getMetadataSignatures(context, item); + assertThat(signatures, hasSize(1)); + assertThat(countryFactory.getDescription(context, item, signatures.get(0)), is("IT")); + } + + @Test + public void testExternalIdentifiersCreation() { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test profile") + .withScopusAuthorIdentifier("SCOPUS-123456") + .withResearcherIdentifier("R-ID-01") + .build(); + context.restoreAuthSystemState(); + + List values = List.of(getMetadata(item, "person.identifier.scopus-author-id", 0)); + + Object firstOrcidObject = profileSectionFactoryService.createOrcidObject(context, values, EXTERNAL_IDS); + assertThat(firstOrcidObject, instanceOf(PersonExternalIdentifier.class)); + assertThat((PersonExternalIdentifier) firstOrcidObject, matches(hasTypeAndValue("SCOPUS", "SCOPUS-123456"))); + + values = List.of(getMetadata(item, "person.identifier.rid", 0)); + + Object secondOrcidObject = profileSectionFactoryService.createOrcidObject(context, values, EXTERNAL_IDS); + assertThat(secondOrcidObject, instanceOf(PersonExternalIdentifier.class)); + assertThat((PersonExternalIdentifier) secondOrcidObject, matches(hasTypeAndValue("RID", "R-ID-01"))); + } + + @Test + public void testExternalIdentifiersGeneration() { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test profile") + .withScopusAuthorIdentifier("SCOPUS-123456") + .withResearcherIdentifier("R-ID-01") + .build(); + context.restoreAuthSystemState(); + + OrcidProfileSectionFactory externalIdsFactory = getFactory(item, 
EXTERNAL_IDS); + List signatures = externalIdsFactory.getMetadataSignatures(context, item); + assertThat(signatures, hasSize(2)); + + List descriptions = signatures.stream() + .map(signature -> externalIdsFactory.getDescription(context, item, signature)) + .collect(Collectors.toList()); + + assertThat(descriptions, containsInAnyOrder("SCOPUS-123456", "R-ID-01")); + } + + @Test + public void testResearcherUrlsCreation() { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test profile") + .withUriIdentifier("www.test.com") + .build(); + context.restoreAuthSystemState(); + + List values = List.of(getMetadata(item, "dc.identifier.uri", 0)); + + Object orcidObject = profileSectionFactoryService.createOrcidObject(context, values, RESEARCHER_URLS); + assertThat(orcidObject, instanceOf(ResearcherUrl.class)); + assertThat((ResearcherUrl) orcidObject, matches(hasUrl("www.test.com"))); + } + + @Test + public void testKeywordsCreation() { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test profile") + .withSubject("Subject") + .build(); + context.restoreAuthSystemState(); + + List values = List.of(getMetadata(item, "dc.subject", 0)); + Object orcidObject = profileSectionFactoryService.createOrcidObject(context, values, KEYWORDS); + assertThat(orcidObject, instanceOf(Keyword.class)); + assertThat((Keyword) orcidObject, matches(hasContent("Subject"))); + } + + @Test + public void testOtherNamesCreation() { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test profile") + .withVariantName("Variant name") + .withVernacularName("Vernacular name") + .build(); + context.restoreAuthSystemState(); + + List values = List.of(getMetadata(item, "person.name.variant", 0)); + Object orcidObject = profileSectionFactoryService.createOrcidObject(context, values, OTHER_NAMES); + assertThat(orcidObject, 
instanceOf(OtherName.class)); + assertThat((OtherName) orcidObject, matches(hasValue("Variant name"))); + + values = List.of(getMetadata(item, "person.name.translated", 0)); + orcidObject = profileSectionFactoryService.createOrcidObject(context, values, OTHER_NAMES); + assertThat(orcidObject, instanceOf(OtherName.class)); + assertThat((OtherName) orcidObject, matches(hasValue("Vernacular name"))); + } + + private MetadataValue getMetadata(Item item, String metadataField, int place) { + List values = itemService.getMetadataByMetadataString(item, metadataField); + assertThat(values.size(), greaterThan(place)); + return values.get(place); + } + + private Predicate hasTypeAndValue(String type, String value) { + return identifier -> value.equals(identifier.getValue()) + && type.equals(identifier.getType()) + && identifier.getRelationship() == Relationship.SELF + && identifier.getUrl() != null && value.equals(identifier.getUrl().getValue()); + } + + private Predicate hasUrl(String url) { + return researcherUrl -> researcherUrl.getUrl() != null && url.equals(researcherUrl.getUrl().getValue()); + } + + private Predicate hasContent(String value) { + return keyword -> value.equals(keyword.getContent()); + } + + private Predicate hasValue(String value) { + return name -> value.equals(name.getContent()); + } + + private OrcidProfileSectionFactory getFactory(Item item, OrcidProfileSectionType sectionType) { + return profileSectionFactoryService.findBySectionType(sectionType) + .orElseThrow(() -> new IllegalStateException("No profile section factory of type " + sectionType)); + } +} diff --git a/dspace-api/src/test/java/org/dspace/orcid/service/PlainMetadataSignatureGeneratorIT.java b/dspace-api/src/test/java/org/dspace/orcid/service/PlainMetadataSignatureGeneratorIT.java new file mode 100644 index 0000000000..66b9a98e72 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/orcid/service/PlainMetadataSignatureGeneratorIT.java @@ -0,0 +1,166 @@ +/** + * The contents of this 
file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.notNullValue; + +import java.util.List; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.ItemService; +import org.dspace.orcid.service.impl.PlainMetadataSignatureGeneratorImpl; +import org.junit.Before; +import org.junit.Test; + +/** + * Integration tests for {@link PlainMetadataSignatureGeneratorImpl}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class PlainMetadataSignatureGeneratorIT extends AbstractIntegrationTestWithDatabase { + + private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + + private Collection collection; + + private MetadataSignatureGenerator generator = new PlainMetadataSignatureGeneratorImpl(); + + @Before + public void setup() { + + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withTitle("Parent community") + .build(); + + collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .withEntityType("Person") + .build(); + + context.restoreAuthSystemState(); + } + + @Test + public void testSignatureGenerationWithManyMetadataValues() { + + context.turnOffAuthorisationSystem(); + + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item title") + .withIssueDate("2020-01-01") + .withAuthor("Jesse Pinkman") + .withEditor("Editor") + .build(); + + context.restoreAuthSystemState(); + + MetadataValue author = getMetadata(item, "dc.contributor.author", 0); + MetadataValue editor = getMetadata(item, "dc.contributor.editor", 0); + + String signature = generator.generate(context, List.of(author, editor)); + assertThat(signature, notNullValue()); + + String expectedSignature = "dc.contributor.author::Jesse Pinkman§§" + + "dc.contributor.editor::Editor"; + + assertThat(signature, equalTo(expectedSignature)); + + String anotherSignature = generator.generate(context, List.of(editor, author)); + assertThat(anotherSignature, equalTo(signature)); + + List metadataValues = generator.findBySignature(context, item, signature); + assertThat(metadataValues, hasSize(2)); + assertThat(metadataValues, containsInAnyOrder(author, editor)); + + } + + @Test + public void testSignatureGenerationWithSingleMetadataValue() { + + context.turnOffAuthorisationSystem(); + + Item item = 
ItemBuilder.createItem(context, collection) + .withTitle("Item title") + .withDescription("Description") + .withAuthor("Jesse Pinkman") + .withUriIdentifier("https://www.4science.it/en") + .build(); + + context.restoreAuthSystemState(); + + MetadataValue description = getMetadata(item, "dc.description", 0); + String signature = generator.generate(context, List.of(description)); + assertThat(signature, notNullValue()); + assertThat(signature, equalTo("dc.description::Description")); + + List metadataValues = generator.findBySignature(context, item, signature); + assertThat(metadataValues, hasSize(1)); + assertThat(metadataValues, containsInAnyOrder(description)); + + MetadataValue url = getMetadata(item, "dc.identifier.uri", 0); + signature = generator.generate(context, List.of(url)); + assertThat(signature, equalTo("dc.identifier.uri::https://www.4science.it/en")); + + metadataValues = generator.findBySignature(context, item, signature); + assertThat(metadataValues, hasSize(1)); + assertThat(metadataValues, containsInAnyOrder(url)); + + } + + @Test + public void testSignatureGenerationWithManyEqualsMetadataValues() { + context.turnOffAuthorisationSystem(); + + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item title") + .withDescription("Description") + .withAuthor("Jesse Pinkman") + .withAuthor("Jesse Pinkman") + .build(); + + context.restoreAuthSystemState(); + + MetadataValue firstAuthor = getMetadata(item, "dc.contributor.author", 0); + String firstSignature = generator.generate(context, List.of(firstAuthor)); + assertThat(firstSignature, notNullValue()); + assertThat(firstSignature, equalTo("dc.contributor.author::Jesse Pinkman")); + + MetadataValue secondAuthor = getMetadata(item, "dc.contributor.author", 1); + String secondSignature = generator.generate(context, List.of(secondAuthor)); + assertThat(secondSignature, notNullValue()); + assertThat(secondSignature, equalTo("dc.contributor.author::Jesse Pinkman")); + + List metadataValues 
= generator.findBySignature(context, item, firstSignature); + assertThat(metadataValues, hasSize(1)); + assertThat(metadataValues, anyOf(contains(firstAuthor), contains(secondAuthor))); + } + + private MetadataValue getMetadata(Item item, String metadataField, int place) { + List values = itemService.getMetadataByMetadataString(item, metadataField); + assertThat(values.size(), greaterThan(place)); + return values.get(place); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/process/ProcessIT.java b/dspace-api/src/test/java/org/dspace/process/ProcessIT.java new file mode 100644 index 0000000000..d664065212 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/process/ProcessIT.java @@ -0,0 +1,90 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.process; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; +import java.util.UUID; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.GroupBuilder; +import org.dspace.builder.ProcessBuilder; +import org.dspace.eperson.Group; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.GroupService; +import org.dspace.scripts.Process; +import org.dspace.scripts.factory.ScriptServiceFactory; +import org.dspace.scripts.service.ProcessService; +import org.junit.Test; + +/** + * This class will aim to test Process related use cases + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it) + */ +public class ProcessIT extends AbstractIntegrationTestWithDatabase { + + protected ProcessService processService = ScriptServiceFactory.getInstance().getProcessService(); + protected GroupService groupService = 
EPersonServiceFactory.getInstance().getGroupService(); + + @Test + public void checkProcessGroupsTest() throws Exception { + context.turnOffAuthorisationSystem(); + Group groupA = GroupBuilder.createGroup(context) + .withName("Group A") + .addMember(admin) + .build(); + + Set groupSet = new HashSet<>(); + groupSet.add(groupA); + + Process processA = ProcessBuilder.createProcess(context, admin, "mock-script", + new LinkedList<>(), + groupSet).build(); + + context.restoreAuthSystemState(); + Process process = processService.find(context, processA.getID()); + List groups = process.getGroups(); + boolean isPresent = groups.stream().anyMatch(g -> g.getID().equals(groupA.getID())); + assertTrue(isPresent); + } + + @Test + public void removeOneGroupTest() throws Exception { + + context.turnOffAuthorisationSystem(); + Group groupA = GroupBuilder.createGroup(context) + .withName("Group A") + .addMember(admin).build(); + + Set groupSet = new HashSet<>(); + groupSet.add(groupA); + + UUID groupUuid = groupA.getID(); + Process processA = ProcessBuilder.createProcess(context, admin, "mock-script", new LinkedList<>(), + groupSet).build(); + + context.restoreAuthSystemState(); + + groupService.delete(context, groupA); + context.commit(); + context.reloadEntity(groupA); + processA = context.reloadEntity(processA); + + Process process = processService.find(context, processA.getID()); + List groups = process.getGroups(); + boolean isPresent = groups.stream().anyMatch(g -> g.getID().equals(groupUuid)); + assertFalse(isPresent); + + } +} diff --git a/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java b/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java index 1197370e32..f69c0e3af7 100644 --- a/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java +++ b/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java @@ -54,9 +54,7 @@ public class 
MockDSpaceRunnableScriptConfiguration trackers = new ArrayList<>(); - OpenURLTracker tracker1 = mock(OpenURLTracker.class); - OpenURLTracker tracker2 = mock(OpenURLTracker.class); - OpenURLTracker tracker3 = mock(OpenURLTracker.class); - - trackers.add(tracker1); - trackers.add(tracker2); - trackers.add(tracker3); + List trackers = List.of( + createMockTracker("tacker1"), + createMockTracker("tacker2"), + createMockTracker("tacker3") + ); when(failedOpenURLTrackerService.findAll(any(Context.class))).thenReturn(trackers); - doNothing().when(openUrlService).tryReprocessFailed(any(Context.class), any(OpenURLTracker.class)); + + // NOTE: first http request will return status code 500, next one 404, then 200 + doReturn( + createMockHttpResponse(HttpURLConnection.HTTP_INTERNAL_ERROR), + createMockHttpResponse(HttpURLConnection.HTTP_NOT_FOUND), + createMockHttpResponse(HttpURLConnection.HTTP_OK) + ).when(httpClient).execute(any()); openUrlService.reprocessFailedQueue(context); verify(openUrlService, times(3)).tryReprocessFailed(any(Context.class), any(OpenURLTracker.class)); + // NOTE: http request for tracker 1 and 2 failed, so tracker 1 and 2 should be kept + // http request for tracker 3 succeeded, so tracker 3 should be removed + verify(failedOpenURLTrackerService, times(0)).remove(any(Context.class), eq(trackers.get(0))); + verify(failedOpenURLTrackerService, times(0)).remove(any(Context.class), eq(trackers.get(1))); + verify(failedOpenURLTrackerService, times(1)).remove(any(Context.class), eq(trackers.get(2))); } /** * Test the method that logs the failed urls in the db - * @throws SQLException */ @Test public void testLogfailed() throws SQLException { Context context = mock(Context.class); OpenURLTracker tracker1 = mock(OpenURLTracker.class); - doCallRealMethod().when(tracker1).setUrl(anyString()); - when(tracker1.getUrl()).thenCallRealMethod(); - when(failedOpenURLTrackerService.create(any(Context.class))).thenReturn(tracker1); String failedUrl = "failed-url"; 
openUrlService.logfailed(context, failedUrl); - assertThat(tracker1.getUrl(), is(failedUrl)); + verify(tracker1).setUrl(failedUrl); + // NOTE: verify that setUploadDate received a timestamp whose value is no less than 5 seconds from now + ArgumentCaptor dateArgCaptor = ArgumentCaptor.forClass(Date.class); + verify(tracker1).setUploadDate(dateArgCaptor.capture()); + assertThat( + new BigDecimal(dateArgCaptor.getValue().getTime()), + closeTo(new BigDecimal(new Date().getTime()), new BigDecimal(5000)) + ); } /** * Tests whether the timeout gets set to 10 seconds when processing a url - * @throws SQLException */ @Test - public void testTimeout() throws SQLException { + public void testTimeout() throws IOException, SQLException { Context context = mock(Context.class); - String URL = "http://bla.com"; - RequestConfig.Builder requestConfig = mock(RequestConfig.Builder.class); - doReturn(requestConfig).when(openUrlService).getRequestConfigBuilder(); - doReturn(requestConfig).when(requestConfig).setConnectTimeout(10 * 1000); - doReturn(RequestConfig.custom().build()).when(requestConfig).build(); + // 1. verify processUrl calls getHttpClient and getHttpClientRequestConfig once + doReturn(createMockHttpResponse(HttpURLConnection.HTTP_OK)).when(httpClient).execute(any()); + openUrlService.processUrl(context, "test-url"); + verify(openUrlService).getHttpClient(any()); + verify(openUrlService).getHttpClientRequestConfig(); - openUrlService.processUrl(context, URL); - - Mockito.verify(requestConfig).setConnectTimeout(10 * 1000); + // 2. 
verify that getHttpClientRequestConfig sets the timeout + assertThat(openUrlService.getHttpClientRequestConfig().getConnectTimeout(), is(10 * 1000)); } } diff --git a/dspace-api/src/test/java/org/dspace/statistics/util/IPTableTest.java b/dspace-api/src/test/java/org/dspace/statistics/util/IPTableTest.java index 320cc55a0d..1dbbdb6cd0 100644 --- a/dspace-api/src/test/java/org/dspace/statistics/util/IPTableTest.java +++ b/dspace-api/src/test/java/org/dspace/statistics/util/IPTableTest.java @@ -56,14 +56,15 @@ public class IPTableTest { IPTable instance = new IPTable(); // Add IP address instance.add(LOCALHOST); - // Add IP range + // Add IP range (contains 256 addresses) instance.add("192.168.1"); - // Make sure both exist + // Make sure it returns the addresses for all ranges Set ipSet = instance.toSet(); - assertEquals(2, ipSet.size()); + assertEquals(257, ipSet.size()); assertTrue(ipSet.contains(LOCALHOST)); - assertTrue(ipSet.contains("192.168.1")); + assertTrue(ipSet.contains("192.168.1.0")); + assertTrue(ipSet.contains("192.168.1.255")); } @Test @@ -76,13 +77,13 @@ public class IPTableTest { assertEquals(1, instance.toSet().size()); instance = new IPTable(); - // Add IP range & then add an IP from within that range + // Add IP range w/ 256 addresses & then add an IP from within that range instance.add("192.168.1"); instance.add("192.168.1.1"); // Verify only the range exists Set ipSet = instance.toSet(); - assertEquals(1, ipSet.size()); - assertTrue(ipSet.contains("192.168.1")); + assertEquals(256, ipSet.size()); + assertTrue(ipSet.contains("192.168.1.1")); instance = new IPTable(); // Now, switch order. 
Add IP address, then add a range encompassing that IP @@ -90,8 +91,8 @@ public class IPTableTest { instance.add("192.168.1"); // Verify only the range exists ipSet = instance.toSet(); - assertEquals(1, ipSet.size()); - assertTrue(ipSet.contains("192.168.1")); + assertEquals(256, ipSet.size()); + assertTrue(ipSet.contains("192.168.1.1")); } /** @@ -120,6 +121,48 @@ public class IPTableTest { assertTrue("IP within an add()ed range should match", contains); } + @Test + public void testDashRangeContains() throws Exception { + IPTable instance = new IPTable(); + instance.add("192.168.0.0 - 192.168.0.245"); + + assertTrue("Range should contain lower limit", instance.contains("192.168.0.0")); + assertTrue("Range should contain upper limit", instance.contains("192.168.0.245")); + assertTrue("Range should contain value in between limits", instance.contains("192.168.0.123")); + assertTrue("Range should contain value in between limits", instance.contains("192.168.0.234")); + + assertFalse("Range should not contain value below lower limit", instance.contains("192.167.255.255")); + assertFalse("Range should not contain value above upper limit", instance.contains("192.168.0.246")); + } + + @Test + public void testSubnetRangeContains() throws Exception { + IPTable instance = new IPTable(); + instance.add("192.168.0.0/30"); // translates to 192.168.0.0 - 192.168.0.3 + + assertTrue("Range should contain lower limit", instance.contains("192.168.0.0")); + assertTrue("Range should contain upper limit", instance.contains("192.168.0.3")); + assertTrue("Range should contain values in between limits", instance.contains("192.168.0.1")); + assertTrue("Range should contain values in between limits", instance.contains("192.168.0.2")); + + assertFalse("Range should not contain value below lower limit", instance.contains("192.167.255.255")); + assertFalse("Range should not contain value above upper limit", instance.contains("192.168.0.4")); + } + + @Test + public void 
testImplicitRangeContains() throws Exception { + IPTable instance = new IPTable(); + instance.add("192.168.1"); + + assertTrue("Range should contain lower limit", instance.contains("192.168.1.0")); + assertTrue("Range should contain upper limit", instance.contains("192.168.1.255")); + assertTrue("Range should contain values in between limits", instance.contains("192.168.1.123")); + assertTrue("Range should contain values in between limits", instance.contains("192.168.1.234")); + + assertFalse("Range should not contain value below lower limit", instance.contains("192.168.0.0")); + assertFalse("Range should not contain value above upper limit", instance.contains("192.168.2.0")); + } + /** * Test of isEmpty method, of class IPTable. * @throws java.lang.Exception passed through. diff --git a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceTest.java b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceTest.java new file mode 100644 index 0000000000..920fb9316c --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceTest.java @@ -0,0 +1,480 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.storage.bitstore; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.isEmptyOrNullString; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.startsWith; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.util.function.Supplier; + +import com.amazonaws.regions.Regions; +import 
com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.AmazonS3Client; +import com.amazonaws.services.s3.model.GetObjectRequest; +import com.amazonaws.services.s3.model.PutObjectRequest; +import com.amazonaws.services.s3.model.PutObjectResult; +import com.amazonaws.services.s3.model.S3Object; +import com.amazonaws.services.s3.model.S3ObjectInputStream; +import com.amazonaws.services.s3.transfer.TransferManager; +import com.amazonaws.services.s3.transfer.Upload; +import com.amazonaws.services.s3.transfer.model.UploadResult; +import org.apache.commons.io.FileUtils; +import org.dspace.AbstractUnitTest; +import org.dspace.content.Bitstream; +import org.dspace.curate.Utils; +import org.hamcrest.Matchers; +import org.junit.Before; +import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.Mock; +import org.mockito.MockedStatic; +import org.mockito.Mockito; + + + + +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + * + */ +public class S3BitStoreServiceTest extends AbstractUnitTest { + + private S3BitStoreService s3BitStoreService; + + @Mock + private AmazonS3Client s3Service; + + @Mock + private TransferManager tm; + + @Mock + private Bitstream bitstream; + + @Mock + private Bitstream externalBitstream; + + @Before + public void setUp() throws Exception { + this.s3BitStoreService = new S3BitStoreService(s3Service, tm); + } + + private Supplier mockedServiceSupplier() { + return () -> this.s3Service; + } + + @Test + public void givenBucketWhenInitThenUsesSameBucket() throws IOException { + String bucketName = "Bucket0"; + s3BitStoreService.setBucketName(bucketName); + when(this.s3Service.doesBucketExist(bucketName)).thenReturn(false); + + assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString()); + + this.s3BitStoreService.init(); + + verify(this.s3Service).doesBucketExist(bucketName); + verify(this.s3Service, Mockito.times(1)).createBucket(bucketName); + 
assertThat(s3BitStoreService.getAwsAccessKey(), isEmptyOrNullString()); + assertThat(s3BitStoreService.getAwsSecretKey(), isEmptyOrNullString()); + assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString()); + } + + @Test + public void givenEmptyBucketWhenInitThenUsesDefaultBucket() throws IOException { + assertThat(s3BitStoreService.getBucketName(), isEmptyOrNullString()); + when(this.s3Service.doesBucketExist(startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX))).thenReturn(false); + assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString()); + + this.s3BitStoreService.init(); + + verify(this.s3Service, Mockito.times(1)).createBucket(startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX)); + assertThat(s3BitStoreService.getBucketName(), Matchers.startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX)); + assertThat(s3BitStoreService.getAwsAccessKey(), isEmptyOrNullString()); + assertThat(s3BitStoreService.getAwsSecretKey(), isEmptyOrNullString()); + assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString()); + } + + @Test + public void givenAccessKeysWhenInitThenVerifiesCorrectBuilderCreation() throws IOException { + assertThat(s3BitStoreService.getAwsAccessKey(), isEmptyOrNullString()); + assertThat(s3BitStoreService.getAwsSecretKey(), isEmptyOrNullString()); + assertThat(s3BitStoreService.getBucketName(), isEmptyOrNullString()); + assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString()); + when(this.s3Service.doesBucketExist(startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX))).thenReturn(false); + + final String awsAccessKey = "ACCESS_KEY"; + final String awsSecretKey = "SECRET_KEY"; + + this.s3BitStoreService.setAwsAccessKey(awsAccessKey); + this.s3BitStoreService.setAwsSecretKey(awsSecretKey); + + try (MockedStatic mockedS3BitStore = Mockito.mockStatic(S3BitStoreService.class)) { + mockedS3BitStore + .when(() -> + S3BitStoreService.amazonClientBuilderBy( + ArgumentMatchers.any(Regions.class), + 
ArgumentMatchers.argThat( + credentials -> + awsAccessKey.equals(credentials.getAWSAccessKeyId()) && + awsSecretKey.equals(credentials.getAWSSecretKey()) + ) + ) + ) + .thenReturn(this.mockedServiceSupplier()); + + this.s3BitStoreService.init(); + + mockedS3BitStore.verify( + () -> + S3BitStoreService.amazonClientBuilderBy( + ArgumentMatchers.any(Regions.class), + ArgumentMatchers.argThat( + credentials -> + awsAccessKey.equals(credentials.getAWSAccessKeyId()) && + awsSecretKey.equals(credentials.getAWSSecretKey()) + ) + ) + ); + } + + + verify(this.s3Service, Mockito.times(1)).createBucket(startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX)); + assertThat(s3BitStoreService.getBucketName(), Matchers.startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX)); + assertThat(s3BitStoreService.getAwsAccessKey(), Matchers.equalTo(awsAccessKey)); + assertThat(s3BitStoreService.getAwsSecretKey(), Matchers.equalTo(awsSecretKey)); + assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString()); + } + + @Test + public void givenBucketBitStreamIdInputStreamWhenRetrievingFromS3ThenUsesBucketBitStreamId() throws IOException { + String bucketName = "BucketTest"; + String bitStreamId = "BitStreamId"; + this.s3BitStoreService.setBucketName(bucketName); + this.s3BitStoreService.setUseRelativePath(false); + when(bitstream.getInternalId()).thenReturn(bitStreamId); + + S3Object object = Mockito.mock(S3Object.class); + S3ObjectInputStream inputStream = Mockito.mock(S3ObjectInputStream.class); + when(object.getObjectContent()).thenReturn(inputStream); + when(this.s3Service.getObject(ArgumentMatchers.any(GetObjectRequest.class))).thenReturn(object); + + this.s3BitStoreService.init(); + assertThat(this.s3BitStoreService.get(bitstream), Matchers.equalTo(inputStream)); + + verify(this.s3Service).getObject( + ArgumentMatchers.argThat( + request -> + bucketName.contentEquals(request.getBucketName()) && + bitStreamId.contentEquals(request.getKey()) + ) + ); + + } + + @Test + public void 
givenBucketBitStreamIdWhenNothingFoundOnS3ThenReturnsNull() throws IOException { + String bucketName = "BucketTest"; + String bitStreamId = "BitStreamId"; + this.s3BitStoreService.setBucketName(bucketName); + this.s3BitStoreService.setUseRelativePath(false); + when(bitstream.getInternalId()).thenReturn(bitStreamId); + + when(this.s3Service.getObject(ArgumentMatchers.any(GetObjectRequest.class))).thenReturn(null); + + this.s3BitStoreService.init(); + assertThat(this.s3BitStoreService.get(bitstream), Matchers.nullValue()); + + verify(this.s3Service).getObject( + ArgumentMatchers.argThat( + request -> + bucketName.contentEquals(request.getBucketName()) && + bitStreamId.contentEquals(request.getKey()) + ) + ); + + } + + @Test + public void givenSubFolderWhenRequestsItemFromS3ThenTheIdentifierShouldHaveProperPath() throws IOException { + String bucketName = "BucketTest"; + String bitStreamId = "012345"; + String subfolder = "/test/DSpace7/"; + this.s3BitStoreService.setBucketName(bucketName); + this.s3BitStoreService.setUseRelativePath(false); + this.s3BitStoreService.setSubfolder(subfolder); + when(bitstream.getInternalId()).thenReturn(bitStreamId); + + S3Object object = Mockito.mock(S3Object.class); + S3ObjectInputStream inputStream = Mockito.mock(S3ObjectInputStream.class); + when(object.getObjectContent()).thenReturn(inputStream); + when(this.s3Service.getObject(ArgumentMatchers.any(GetObjectRequest.class))).thenReturn(object); + + this.s3BitStoreService.init(); + assertThat(this.s3BitStoreService.get(bitstream), Matchers.equalTo(inputStream)); + + verify(this.s3Service).getObject( + ArgumentMatchers.argThat( + request -> + bucketName.equals(request.getBucketName()) && + request.getKey().startsWith(subfolder) && + request.getKey().contains(bitStreamId) && + !request.getKey().contains(File.separator + File.separator) + ) + ); + + } + + @Test + public void handleRegisteredIdentifierPrefixInS3() { + String trueBitStreamId = "012345"; + String registeredBitstreamId = 
s3BitStoreService.REGISTERED_FLAG + trueBitStreamId; + // Should be detected as registered bitstream + assertTrue(this.s3BitStoreService.isRegisteredBitstream(registeredBitstreamId)); + } + + @Test + public void stripRegisteredBitstreamPrefixWhenCalculatingPath() { + // Set paths and IDs + String s3Path = "UNIQUE_S3_PATH/test/bitstream.pdf"; + String registeredBitstreamId = s3BitStoreService.REGISTERED_FLAG + s3Path; + // Paths should be equal, since the getRelativePath method should strip the registered -R prefix + String relativeRegisteredPath = this.s3BitStoreService.getRelativePath(registeredBitstreamId); + assertEquals(s3Path, relativeRegisteredPath); + } + + @Test + public void givenBitStreamIdentifierLongerThanPossibleWhenIntermediatePathIsComputedThenIsSplittedAndTruncated() { + String path = "01234567890123456789"; + String computedPath = this.s3BitStoreService.getIntermediatePath(path); + String expectedPath = "01" + File.separator + "23" + File.separator + "45" + File.separator; + assertThat(computedPath, equalTo(expectedPath)); + } + + @Test + public void givenBitStreamIdentifierShorterThanAFolderLengthWhenIntermediatePathIsComputedThenIsSingleFolder() { + String path = "0"; + String computedPath = this.s3BitStoreService.getIntermediatePath(path); + String expectedPath = "0" + File.separator; + assertThat(computedPath, equalTo(expectedPath)); + } + + @Test + public void givenPartialBitStreamIdentifierWhenIntermediatePathIsComputedThenIsCompletlySplitted() { + String path = "01234"; + String computedPath = this.s3BitStoreService.getIntermediatePath(path); + String expectedPath = "01" + File.separator + "23" + File.separator + "4" + File.separator; + assertThat(computedPath, equalTo(expectedPath)); + } + + @Test + public void givenMaxLengthBitStreamIdentifierWhenIntermediatePathIsComputedThenIsSplittedAllAsSubfolder() { + String path = "012345"; + String computedPath = this.s3BitStoreService.getIntermediatePath(path); + String expectedPath = "01" + 
File.separator + "23" + File.separator + "45" + File.separator; + assertThat(computedPath, equalTo(expectedPath)); + } + + @Test + public void givenBitStreamIdentifierWhenIntermediatePathIsComputedThenNotEndingDoubleSlash() throws IOException { + StringBuilder path = new StringBuilder("01"); + String computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + int slashes = computeSlashes(path.toString()); + assertThat(computedPath, Matchers.endsWith(File.separator)); + assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); + + path.append("2"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator))); + + path.append("3"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator))); + + path.append("4"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator))); + + path.append("56789"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator))); + } + + @Test + public void givenBitStreamIdentidierWhenIntermediatePathIsComputedThenMustBeSplitted() throws IOException { + StringBuilder path = new StringBuilder("01"); + String computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + int slashes = computeSlashes(path.toString()); + assertThat(computedPath, Matchers.endsWith(File.separator)); + assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); + + path.append("2"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + slashes = computeSlashes(path.toString()); + assertThat(computedPath, 
Matchers.endsWith(File.separator)); + assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); + + path.append("3"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + slashes = computeSlashes(path.toString()); + assertThat(computedPath, Matchers.endsWith(File.separator)); + assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); + + path.append("4"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + slashes = computeSlashes(path.toString()); + assertThat(computedPath, Matchers.endsWith(File.separator)); + assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); + + path.append("56789"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + slashes = computeSlashes(path.toString()); + assertThat(computedPath, Matchers.endsWith(File.separator)); + assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); + } + + @Test + public void givenBitStreamIdentifierWithSlashesWhenSanitizedThenSlashesMustBeRemoved() { + String sInternalId = new StringBuilder("01") + .append(File.separator) + .append("22") + .append(File.separator) + .append("33") + .append(File.separator) + .append("4455") + .toString(); + String computedPath = this.s3BitStoreService.sanitizeIdentifier(sInternalId); + assertThat(computedPath, Matchers.not(Matchers.startsWith(File.separator))); + assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator))); + assertThat(computedPath, Matchers.not(Matchers.containsString(File.separator))); + } + + @Test + public void givenBitStreamWhenRemoveThenCallS3DeleteMethod() throws Exception { + String bucketName = "BucketTest"; + String bitStreamId = "BitStreamId"; + this.s3BitStoreService.setBucketName(bucketName); + this.s3BitStoreService.setUseRelativePath(false); + when(bitstream.getInternalId()).thenReturn(bitStreamId); + + this.s3BitStoreService.init(); + 
this.s3BitStoreService.remove(bitstream); + + verify(this.s3Service, Mockito.times(1)).deleteObject(ArgumentMatchers.eq(bucketName), + ArgumentMatchers.eq(bitStreamId)); + + } + + @Test + public void givenBitStreamWhenPutThenCallS3PutMethodAndStoresInBitStream() throws Exception { + String bucketName = "BucketTest"; + String bitStreamId = "BitStreamId"; + this.s3BitStoreService.setBucketName(bucketName); + this.s3BitStoreService.setUseRelativePath(false); + when(bitstream.getInternalId()).thenReturn(bitStreamId); + + File file = Mockito.mock(File.class); + InputStream in = Mockito.mock(InputStream.class); + PutObjectResult putObjectResult = Mockito.mock(PutObjectResult.class); + Upload upload = Mockito.mock(Upload.class); + UploadResult uploadResult = Mockito.mock(UploadResult.class); + when(upload.waitForUploadResult()).thenReturn(uploadResult); + String mockedTag = "1a7771d5fdd7bfdfc84033c70b1ba555"; + when(file.length()).thenReturn(8L); + try (MockedStatic fileMock = Mockito.mockStatic(File.class)) { + try (MockedStatic fileUtilsMock = Mockito.mockStatic(FileUtils.class)) { + try (MockedStatic curateUtils = Mockito.mockStatic(Utils.class)) { + curateUtils.when(() -> Utils.checksum((File) ArgumentMatchers.any(), ArgumentMatchers.any())) + .thenReturn(mockedTag); + + fileMock + .when(() -> File.createTempFile(ArgumentMatchers.any(), ArgumentMatchers.any())) + .thenReturn(file); + + when(this.tm.upload(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any())) + .thenReturn(upload); + + this.s3BitStoreService.init(); + this.s3BitStoreService.put(bitstream, in); + } + } + + } + + verify(this.bitstream, Mockito.times(1)).setSizeBytes( + ArgumentMatchers.eq(8L) + ); + + verify(this.bitstream, Mockito.times(1)).setChecksum( + ArgumentMatchers.eq(mockedTag) + ); + + verify(this.tm, Mockito.times(1)).upload( + ArgumentMatchers.eq(bucketName), + ArgumentMatchers.eq(bitStreamId), + ArgumentMatchers.eq(file) + ); + + verify(file, Mockito.times(1)).delete(); + 
+ } + + @Test + public void givenBitStreamWhenCallingPutFileCopyingThrowsIOExceptionPutThenFileIsRemovedAndStreamClosed() + throws Exception { + String bucketName = "BucketTest"; + String bitStreamId = "BitStreamId"; + this.s3BitStoreService.setBucketName(bucketName); + this.s3BitStoreService.setUseRelativePath(false); + when(bitstream.getInternalId()).thenReturn(bitStreamId); + + File file = Mockito.mock(File.class); + InputStream in = Mockito.mock(InputStream.class); + try (MockedStatic fileMock = Mockito.mockStatic(File.class)) { + try (MockedStatic fileUtilsMock = Mockito.mockStatic(FileUtils.class)) { + fileUtilsMock + .when(() -> FileUtils.copyInputStreamToFile(ArgumentMatchers.any(), ArgumentMatchers.any())) + .thenThrow(IOException.class); + fileMock + .when(() -> File.createTempFile(ArgumentMatchers.any(), ArgumentMatchers.any())) + .thenReturn(file); + + this.s3BitStoreService.init(); + assertThrows(IOException.class, () -> this.s3BitStoreService.put(bitstream, in)); + } + + } + + verify(this.bitstream, Mockito.never()).setSizeBytes(ArgumentMatchers.any(Long.class)); + + verify(this.bitstream, Mockito.never()).setChecksum(ArgumentMatchers.any(String.class)); + + verify(this.s3Service, Mockito.never()).putObject(ArgumentMatchers.any(PutObjectRequest.class)); + + verify(file, Mockito.times(1)).delete(); + + } + + private int computeSlashes(String internalId) { + int minimum = internalId.length(); + int slashesPerLevel = minimum / S3BitStoreService.digitsPerLevel; + int odd = Math.min(1, minimum % S3BitStoreService.digitsPerLevel); + int slashes = slashesPerLevel + odd; + return Math.min(slashes, S3BitStoreService.directoryLevels); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/util/DoiCheckTest.java b/dspace-api/src/test/java/org/dspace/util/DoiCheckTest.java new file mode 100644 index 0000000000..17e21779d4 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/util/DoiCheckTest.java @@ -0,0 +1,65 @@ +/** + * The contents of this file are 
subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.text.ParseException; +import java.util.Arrays; +import java.util.List; + +import org.apache.commons.lang.StringUtils; +import org.dspace.importer.external.service.DoiCheck; +import org.junit.Test; + +/** + * Test class for the DoiCheck + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class DoiCheckTest { + + @Test + public void checkDOIsTest() throws ParseException { + for (String doi : DOIsToTest()) { + assertTrue("The: " + doi + " is a doi!", DoiCheck.isDoi(doi)); + } + } + + @Test + public void checkWrongDOIsTest() throws ParseException { + for (String key : wrongDOIsToTest()) { + assertFalse("This : " + key + " isn't a doi!", DoiCheck.isDoi(key)); + } + } + + private List DOIsToTest() { + return Arrays.asList( + "10.1430/8105", + "10.1038/nphys1170", + "10.1002/0470841559.ch1", + "10.1594/PANGAEA.726855", + "10.1594/GFZ.GEOFON.gfz2009kciu", + "10.3866/PKU.WHXB201112303", + "10.11467/isss2003.7.1_11", + "10.3972/water973.0145.db" + ); + } + + private List wrongDOIsToTest() { + return Arrays.asList( + StringUtils.EMPTY, + "123456789", + "nphys1170/10.1038", + "10.", "10", + "10.1038/" + ); + } + +} \ No newline at end of file diff --git a/dspace-api/src/test/java/org/dspace/util/RelationshipVersioningTestUtils.java b/dspace-api/src/test/java/org/dspace/util/RelationshipVersioningTestUtils.java new file mode 100644 index 0000000000..68f73734af --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/util/RelationshipVersioningTestUtils.java @@ -0,0 +1,53 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available 
online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.hasProperty; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +import org.dspace.content.Item; +import org.dspace.content.Relationship.LatestVersionStatus; +import org.dspace.content.RelationshipType; +import org.hamcrest.Matcher; + +/** + * Methods for testing relationships and their behavior with versioned items. + */ +public class RelationshipVersioningTestUtils { + + private RelationshipVersioningTestUtils() {} + + public static Matcher isRel( + Item leftItem, RelationshipType relationshipType, Item rightItem, LatestVersionStatus latestVersionStatus, + int leftPlace, int rightPlace + ) { + return isRel(leftItem, relationshipType, rightItem, latestVersionStatus, null, null, leftPlace, rightPlace); + } + + public static Matcher isRel( + Item leftItem, RelationshipType relationshipType, Item rightItem, LatestVersionStatus latestVersionStatus, + String leftwardValue, String rightwardValue, int leftPlace, int rightPlace + ) { + return allOf( + hasProperty("leftItem", is(leftItem)), + // NOTE: this is a painful one... class RelationshipType does not implement the equals method, so we cannot + // rely on object equality and have to compare ids instead. It has to be in capital letters, + // because the getter has been implemented inconsistently (#id vs #setId() vs #getID()). + hasProperty("relationshipType", hasProperty("ID", is(relationshipType.getID()))), + hasProperty("rightItem", is(rightItem)), + hasProperty("leftPlace", is(leftPlace)), + hasProperty("rightPlace", is(rightPlace)), + hasProperty("leftwardValue", leftwardValue == null ? nullValue() : is(leftwardValue)), + hasProperty("rightwardValue", rightwardValue == null ? 
nullValue() : is(rightwardValue)), + hasProperty("latestVersionStatus", is(latestVersionStatus)) + ); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/util/SimpleMapConverterTest.java b/dspace-api/src/test/java/org/dspace/util/SimpleMapConverterTest.java new file mode 100644 index 0000000000..b380c4e7ba --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/util/SimpleMapConverterTest.java @@ -0,0 +1,171 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.assertThrows; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import org.apache.commons.io.FileUtils; +import org.dspace.services.ConfigurationService; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +/** + * Unit tests for {@link SimpleMapConverter}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@RunWith(MockitoJUnitRunner.class) +public class SimpleMapConverterTest { + + @Rule + public TemporaryFolder folder = new TemporaryFolder(); + + @Mock + private ConfigurationService configurationService; + + private File dspaceDir; + + private File crosswalksDir; + + @Before + public void before() throws IOException { + dspaceDir = folder.getRoot(); + crosswalksDir = folder.newFolder("config", "crosswalks"); + } + + @Test + public void testPropertiesParsing() throws IOException { + + when(configurationService.getProperty("dspace.dir")).thenReturn(dspaceDir.getAbsolutePath()); + createFileInFolder(crosswalksDir, "test.properties", "key1=value1\nkey2=value2\nkey3=value3"); + + SimpleMapConverter simpleMapConverter = new SimpleMapConverter(); + simpleMapConverter.setConfigurationService(configurationService); + simpleMapConverter.setConverterNameFile("test.properties"); + + simpleMapConverter.init(); + + assertThat(simpleMapConverter.getValue("key1"), is("value1")); + assertThat(simpleMapConverter.getValue("key2"), is("value2")); + assertThat(simpleMapConverter.getValue("key3"), is("value3")); + assertThat(simpleMapConverter.getValue(""), is("")); + assertThat(simpleMapConverter.getValue(null), nullValue()); + + assertThat(simpleMapConverter.getValue("key4"), is("key4")); + + } + + @Test + public void testPropertiesParsingWithDefaultValue() throws IOException { + + when(configurationService.getProperty("dspace.dir")).thenReturn(dspaceDir.getAbsolutePath()); + createFileInFolder(crosswalksDir, "test.properties", "key1=value1\nkey2=value2\nkey3=value3"); + + SimpleMapConverter simpleMapConverter = new SimpleMapConverter(); + simpleMapConverter.setConfigurationService(configurationService); + simpleMapConverter.setConverterNameFile("test.properties"); + simpleMapConverter.setDefaultValue("default"); + + simpleMapConverter.init(); + + assertThat(simpleMapConverter.getValue("key1"), is("value1")); 
+ assertThat(simpleMapConverter.getValue("key2"), is("value2")); + assertThat(simpleMapConverter.getValue("key3"), is("value3")); + assertThat(simpleMapConverter.getValue(""), is("default")); + assertThat(simpleMapConverter.getValue(null), is("default")); + + assertThat(simpleMapConverter.getValue("key4"), is("default")); + + } + + @Test + public void testPropertiesParsingWithAnUnexistingFile() throws IOException { + + when(configurationService.getProperty("dspace.dir")).thenReturn(dspaceDir.getAbsolutePath()); + + SimpleMapConverter simpleMapConverter = new SimpleMapConverter(); + simpleMapConverter.setConfigurationService(configurationService); + simpleMapConverter.setConverterNameFile("test.properties"); + + IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, + () -> simpleMapConverter.init()); + + // Get path separator used for this platform (eg. / for Linux, \ for Windows) + String separator = File.separator; + + assertThat(exception.getMessage(), + is("An error occurs parsing " + dspaceDir.getAbsolutePath() + separator + "config" + separator + + "crosswalks" + separator + "test.properties")); + + Throwable cause = exception.getCause(); + assertThat(cause, notNullValue()); + assertThat(cause, instanceOf(FileNotFoundException.class)); + + } + + @Test + public void testPropertiesParsingWithCorruptedFile() throws IOException { + + when(configurationService.getProperty("dspace.dir")).thenReturn(dspaceDir.getAbsolutePath()); + createFileInFolder(crosswalksDir, "test.properties", "key1=value1\nkey2\nkey3=value3"); + + SimpleMapConverter simpleMapConverter = new SimpleMapConverter(); + simpleMapConverter.setConfigurationService(configurationService); + simpleMapConverter.setConverterNameFile("test.properties"); + + simpleMapConverter.init(); + + assertThat(simpleMapConverter.getValue("key1"), is("value1")); + assertThat(simpleMapConverter.getValue("key2"), is("key2")); + assertThat(simpleMapConverter.getValue("key3"), is("value3")); + + 
assertThat(simpleMapConverter.getValue("key4"), is("key4")); + + + } + + @Test + public void testPropertiesParsingWithEmptyFile() throws IOException { + + when(configurationService.getProperty("dspace.dir")).thenReturn(dspaceDir.getAbsolutePath()); + createFileInFolder(crosswalksDir, "test.properties", ""); + + SimpleMapConverter simpleMapConverter = new SimpleMapConverter(); + simpleMapConverter.setConfigurationService(configurationService); + simpleMapConverter.setConverterNameFile("test.properties"); + + simpleMapConverter.init(); + + assertThat(simpleMapConverter.getValue("key1"), is("key1")); + assertThat(simpleMapConverter.getValue("key2"), is("key2")); + + } + + private void createFileInFolder(File folder, String name, String content) throws IOException { + File file = new File(folder, name); + FileUtils.write(file, content, StandardCharsets.UTF_8); + } + +} diff --git a/dspace-api/src/test/resources/org/dspace/app/itemimport/dublin_core-person.xml b/dspace-api/src/test/resources/org/dspace/app/itemimport/dublin_core-person.xml new file mode 100644 index 0000000000..4d530630ba --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/itemimport/dublin_core-person.xml @@ -0,0 +1,3 @@ + + Person Test + \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/itemimport/dublin_core.xml b/dspace-api/src/test/resources/org/dspace/app/itemimport/dublin_core.xml new file mode 100644 index 0000000000..a1afbb417a --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/itemimport/dublin_core.xml @@ -0,0 +1,5 @@ + + A Tale of Two Cities + 1990 + J'aime les Printemps + \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/itemimport/metadata_dcterms.xml b/dspace-api/src/test/resources/org/dspace/app/itemimport/metadata_dcterms.xml new file mode 100644 index 0000000000..8d8e3a8d54 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/itemimport/metadata_dcterms.xml @@ -0,0 +1,3 @@ + + 
A Tale of Two Cities + \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/itemimport/relationships b/dspace-api/src/test/resources/org/dspace/app/itemimport/relationships new file mode 100644 index 0000000000..e8ec1985ce --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/itemimport/relationships @@ -0,0 +1 @@ +relation.isAuthorOfPublication folderName:item_001 \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/itemimport/saf-bitstreams.zip b/dspace-api/src/test/resources/org/dspace/app/itemimport/saf-bitstreams.zip new file mode 100755 index 0000000000..35be57e897 Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/itemimport/saf-bitstreams.zip differ diff --git a/dspace-api/src/test/resources/org/dspace/app/itemimport/saf-relationships.zip b/dspace-api/src/test/resources/org/dspace/app/itemimport/saf-relationships.zip new file mode 100755 index 0000000000..d41e7c6eb8 Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/itemimport/saf-relationships.zip differ diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.csv b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.csv new file mode 100644 index 0000000000..07c22ff0bf --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.csv @@ -0,0 +1,4 @@ +row1,row2,row3,row4 +"data1,2","data 2,2","data3,2","data4,2" +"data1,3","data 2,3","data3,3","data4,3" +"data1,4","data2,4","data3,4","data4,4" diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/wordtest.doc b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.doc similarity index 100% rename from dspace-api/src/test/resources/org/dspace/app/mediafilter/wordtest.doc rename to dspace-api/src/test/resources/org/dspace/app/mediafilter/test.doc diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/wordtest.docx 
b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.docx similarity index 100% rename from dspace-api/src/test/resources/org/dspace/app/mediafilter/wordtest.docx rename to dspace-api/src/test/resources/org/dspace/app/mediafilter/test.docx diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.html b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.html new file mode 100644 index 0000000000..7655f566cc --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.html @@ -0,0 +1,53 @@ + + + + +A Text Extraction Test Document for DSpace + + + + +
    + +

    A Text Extraction Test Document

    + +

    for

    + +

    DSpace

    + +

    + +

    This is a text. For the next sixty seconds this software +will conduct a test of the DSpace text extraction facility. This is only a +text.

    + +

    This is a paragraph that followed the first that lived in +the document that Jack built.

    + +

    Lorem ipsum dolor sit amet. The quick brown fox jumped over +the lazy dog. Yow! Are we having fun yet?

    + +

    This has been a test of the DSpace text extraction system. +In the event of actual content you would care what is written here.

    + +
    + +
    + +
    + +
    + +

    Tip o’ the hat to the U.S. Emergency Broadcast System for the format that I have +irreverently borrowed.

    + +
    + + + + + + diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.odp b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.odp new file mode 100644 index 0000000000..4701884a8a Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.odp differ diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.ods b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.ods new file mode 100644 index 0000000000..94ad873c1a Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.ods differ diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.odt b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.odt new file mode 100644 index 0000000000..3c996a1f46 Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.odt differ diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.pdf b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.pdf new file mode 100644 index 0000000000..5b3749cbff Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.pdf differ diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.ppt b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.ppt new file mode 100644 index 0000000000..bb3a3d6b41 Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.ppt differ diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.pptx b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.pptx new file mode 100644 index 0000000000..2c27ad1630 Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.pptx differ diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.rtf b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.rtf new file mode 100644 index 
0000000000..3b841917b2 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.rtf @@ -0,0 +1,239 @@ +{\rtf1\adeflang1025\ansi\ansicpg1252\uc1\adeff46\deff0\stshfdbch45\stshfloch43\stshfhich43\stshfbi46\deflang1033\deflangfe1033\themelang1033\themelangfe0\themelangcs0{\fonttbl{\f34\fbidi \froman\fcharset0\fprq2{\*\panose 02040503050406030204}Cambria Math;}{\f43\fbidi \froman\fcharset0\fprq2 Liberation Serif{\*\falt Times New Roman};} +{\f44\fbidi \fswiss\fcharset0\fprq2 Liberation Sans{\*\falt Arial};}{\f45\fbidi \froman\fcharset0\fprq0{\*\panose 00000000000000000000}AR PL SungtiL GB;}{\f46\fbidi \froman\fcharset0\fprq0{\*\panose 00000000000000000000}Lohit Hindi;} +{\flomajor\f31500\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;}{\fdbmajor\f31501\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;} +{\fhimajor\f31502\fbidi \fswiss\fcharset0\fprq2{\*\panose 020f0302020204030204}Calibri Light;}{\fbimajor\f31503\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;} +{\flominor\f31504\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;}{\fdbminor\f31505\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;} +{\fhiminor\f31506\fbidi \fswiss\fcharset0\fprq2{\*\panose 020f0502020204030204}Calibri;}{\fbiminor\f31507\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;}{\f1504\fbidi \froman\fcharset238\fprq2 Cambria Math CE;} +{\f1505\fbidi \froman\fcharset204\fprq2 Cambria Math Cyr;}{\f1507\fbidi \froman\fcharset161\fprq2 Cambria Math Greek;}{\f1508\fbidi \froman\fcharset162\fprq2 Cambria Math Tur;}{\f1511\fbidi \froman\fcharset186\fprq2 Cambria Math Baltic;} +{\f1512\fbidi \froman\fcharset163\fprq2 Cambria Math (Vietnamese);}{\flomajor\f31508\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}{\flomajor\f31509\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;} +{\flomajor\f31511\fbidi 
\froman\fcharset161\fprq2 Times New Roman Greek;}{\flomajor\f31512\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\flomajor\f31513\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);} +{\flomajor\f31514\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}{\flomajor\f31515\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\flomajor\f31516\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);} +{\fdbmajor\f31518\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}{\fdbmajor\f31519\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}{\fdbmajor\f31521\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;} +{\fdbmajor\f31522\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\fdbmajor\f31523\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}{\fdbmajor\f31524\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);} +{\fdbmajor\f31525\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\fdbmajor\f31526\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}{\fhimajor\f31528\fbidi \fswiss\fcharset238\fprq2 Calibri Light CE;} +{\fhimajor\f31529\fbidi \fswiss\fcharset204\fprq2 Calibri Light Cyr;}{\fhimajor\f31531\fbidi \fswiss\fcharset161\fprq2 Calibri Light Greek;}{\fhimajor\f31532\fbidi \fswiss\fcharset162\fprq2 Calibri Light Tur;} +{\fhimajor\f31533\fbidi \fswiss\fcharset177\fprq2 Calibri Light (Hebrew);}{\fhimajor\f31534\fbidi \fswiss\fcharset178\fprq2 Calibri Light (Arabic);}{\fhimajor\f31535\fbidi \fswiss\fcharset186\fprq2 Calibri Light Baltic;} +{\fhimajor\f31536\fbidi \fswiss\fcharset163\fprq2 Calibri Light (Vietnamese);}{\fbimajor\f31538\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}{\fbimajor\f31539\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;} +{\fbimajor\f31541\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\fbimajor\f31542\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\fbimajor\f31543\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);} +{\fbimajor\f31544\fbidi 
\froman\fcharset178\fprq2 Times New Roman (Arabic);}{\fbimajor\f31545\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\fbimajor\f31546\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);} +{\flominor\f31548\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}{\flominor\f31549\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}{\flominor\f31551\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;} +{\flominor\f31552\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\flominor\f31553\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}{\flominor\f31554\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);} +{\flominor\f31555\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\flominor\f31556\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}{\fdbminor\f31558\fbidi \froman\fcharset238\fprq2 Times New Roman CE;} +{\fdbminor\f31559\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}{\fdbminor\f31561\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\fdbminor\f31562\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;} +{\fdbminor\f31563\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}{\fdbminor\f31564\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}{\fdbminor\f31565\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;} +{\fdbminor\f31566\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}{\fhiminor\f31568\fbidi \fswiss\fcharset238\fprq2 Calibri CE;}{\fhiminor\f31569\fbidi \fswiss\fcharset204\fprq2 Calibri Cyr;} +{\fhiminor\f31571\fbidi \fswiss\fcharset161\fprq2 Calibri Greek;}{\fhiminor\f31572\fbidi \fswiss\fcharset162\fprq2 Calibri Tur;}{\fhiminor\f31573\fbidi \fswiss\fcharset177\fprq2 Calibri (Hebrew);} +{\fhiminor\f31574\fbidi \fswiss\fcharset178\fprq2 Calibri (Arabic);}{\fhiminor\f31575\fbidi \fswiss\fcharset186\fprq2 Calibri Baltic;}{\fhiminor\f31576\fbidi \fswiss\fcharset163\fprq2 Calibri (Vietnamese);} +{\fbiminor\f31578\fbidi \froman\fcharset238\fprq2 Times New Roman 
CE;}{\fbiminor\f31579\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}{\fbiminor\f31581\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;} +{\fbiminor\f31582\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\fbiminor\f31583\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}{\fbiminor\f31584\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);} +{\fbiminor\f31585\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\fbiminor\f31586\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}{\f1164\fbidi \froman\fcharset238\fprq2 Times New Roman CE;} +{\f1165\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}{\f1167\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\f1168\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\f1169\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);} +{\f1170\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}{\f1171\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\f1172\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}}{\colortbl;\red0\green0\blue0;\red0\green0\blue255; +\red0\green255\blue255;\red0\green255\blue0;\red255\green0\blue255;\red255\green0\blue0;\red255\green255\blue0;\red255\green255\blue255;\red0\green0\blue128;\red0\green128\blue128;\red0\green128\blue0;\red128\green0\blue128;\red128\green0\blue0; +\red128\green128\blue0;\red128\green128\blue128;\red192\green192\blue192;\red0\green0\blue0;\red0\green0\blue0;}{\*\defchp \fs24\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\langfenp2052 }{\*\defpap +\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 }\noqfpromote {\stylesheet{\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0 +\fs24\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 \snext0 \sqformat \spriority0 Normal;}{\*\cs10 \additive \ssemihidden \sunhideused \spriority1 Default Paragraph 
Font;}{\* +\ts11\tsrowd\trftsWidthB3\trpaddl108\trpaddr108\trpaddfl3\trpaddft3\trpaddfb3\trpaddfr3\trcbpat1\trcfpat1\tblind0\tblindtype3\tsvertalt\tsbrdrt\tsbrdrl\tsbrdrb\tsbrdrr\tsbrdrdgl\tsbrdrdgr\tsbrdrh\tsbrdrv +\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0 \fs24\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 \snext11 \ssemihidden \sunhideused +Normal Table;}{\*\cs15 \additive \sqformat \spriority0 Footnote Characters;}{\*\cs16 \additive \super \spriority0 Footnote Anchor;}{\*\cs17 \additive \super \spriority0 Endnote Anchor;}{\*\cs18 \additive \sqformat \spriority0 Endnote Characters;}{ +\s19\ql \li0\ri0\sb240\sa120\keepn\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs28\alang1081 \ltrch\fcs0 \fs28\lang1033\langfe2052\loch\f44\hich\af44\dbch\af45\cgrid\langnp1033\langfenp2052 +\sbasedon0 \snext20 \sqformat \spriority0 Heading;}{\s20\ql \li0\ri0\sa140\sl288\slmult1\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0 +\fs24\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 \sbasedon0 \snext20 \spriority0 Body Text;}{\s21\ql \li0\ri0\sa140\sl288\slmult1\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 +\af46\afs24\alang1081 \ltrch\fcs0 \fs24\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 \sbasedon20 \snext21 \spriority0 List;}{ +\s22\ql \li0\ri0\sb120\sa120\widctlpar\noline\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \ai\af46\afs24\alang1081 \ltrch\fcs0 \i\fs24\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 +\sbasedon0 \snext22 \sqformat \spriority0 caption;}{\s23\ql \li0\ri0\widctlpar\noline\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 
\af46\afs24\alang1081 \ltrch\fcs0 +\fs24\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 \sbasedon0 \snext23 \sqformat \spriority0 Index;}{\s24\ql \fi-339\li339\ri0\widctlpar\noline\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin339\itap0 \rtlch\fcs1 +\af46\afs20\alang1081 \ltrch\fcs0 \fs20\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 \sbasedon0 \snext24 \spriority0 footnote text;}}{\*\rsidtbl \rsid6097384\rsid16590483\rsid16671749}{\mmathPr\mmathFont34\mbrkBin0 +\mbrkBinSub0\msmallFrac0\mdispDef1\mlMargin0\mrMargin0\mdefJc1\mwrapIndent1440\mintLim0\mnaryLim1}{\info{\title A Text Extraction Test Document for DSpace}{\author Mark Wood}{\operator Tim Donohue}{\creatim\yr2022\mo3\dy30\hr13\min54} +{\revtim\yr2022\mo3\dy30\hr13\min54}{\version2}{\edmins0}{\nofpages1}{\nofwords75}{\nofchars433}{\nofcharsws507}{\vern43}}{\*\xmlnstbl {\xmlns1 http://schemas.microsoft.com/office/word/2003/wordml}} +\paperw12240\paperh15840\margl1134\margr1134\margt1134\margb1134\gutter0\ltrsect +\deftab709\widowctrl\ftnbj\aenddoc\trackmoves0\trackformatting1\donotembedsysfont1\relyonvml0\donotembedlingdata0\grfdocevents0\validatexml1\showplaceholdtext0\ignoremixedcontent0\saveinvalidxml0\showxmlerrors1 +\noxlattoyen\expshrtn\noultrlspc\dntblnsbdb\nospaceforul\formshade\horzdoc\dgmargin\dghspace180\dgvspace180\dghorigin450\dgvorigin0\dghshow1\dgvshow1 +\jexpand\viewkind5\viewscale100\pgbrdrhead\pgbrdrfoot\splytwnine\ftnlytwnine\htmautsp\nolnhtadjtbl\useltbaln\alntblind\lytcalctblwd\lyttblrtgr\lnbrkrule\nobrkwrptbl\snaptogridincell\allowfieldendsel\wrppunct +\asianbrkrule\rsidroot6097384\newtblstyruls\nogrowautofit\usenormstyforlist\noindnmbrts\felnbrelev\nocxsptable\indrlsweleven\noafcnsttbl\afelev\utinl\hwelev\spltpgpar\notcvasp\notbrkcnstfrctbl\notvatxbx\krnprsnet\cachedcolbal \nouicompat \fet0 +{\*\wgrffmtfilter 2450}\nofeaturethrottle1\ilfomacatclnup0{\*\ftnsep \ltrpar \pard\plain \ltrpar\ql 
\li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0 +\fs24\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 {\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \chftnsep +\par }}{\*\ftnsepc \ltrpar \pard\plain \ltrpar\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0 +\fs24\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 {\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \chftnsepc +\par }}{\*\aftnsep \ltrpar \pard\plain \ltrpar\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0 +\fs24\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 {\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \chftnsep +\par }}{\*\aftnsepc \ltrpar \pard\plain \ltrpar\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0 +\fs24\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 {\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \chftnsepc +\par }}\ltrpar \sectd \ltrsect\linex0\headery0\footery0\endnhere\sectunlocked1\sectdefaultcl\sftnbj {\*\pnseclvl1\pnucrm\pnstart1\pnindent720\pnhang {\pntxta .}}{\*\pnseclvl2\pnucltr\pnstart1\pnindent720\pnhang {\pntxta .}}{\*\pnseclvl3 +\pndec\pnstart1\pnindent720\pnhang {\pntxta .}}{\*\pnseclvl4\pnlcltr\pnstart1\pnindent720\pnhang {\pntxta )}}{\*\pnseclvl5\pndec\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}}{\*\pnseclvl6\pnlcltr\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}} +{\*\pnseclvl7\pnlcrm\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}}{\*\pnseclvl8\pnlcltr\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}}{\*\pnseclvl9\pnlcrm\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}}\pard\plain \ltrpar +\qc 
\li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0 \fs24\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 {\rtlch\fcs1 \af46\afs30 \ltrch\fcs0 +\fs30\insrsid16671749 \hich\af43\dbch\af45\loch\f43 A Text Extraction Test Document}{\rtlch\fcs1 \af46\afs30 \ltrch\fcs0 \fs30\insrsid6097384 +\par }{\rtlch\fcs1 \af46\afs20 \ltrch\fcs0 \fs20\insrsid16671749 \hich\af43\dbch\af45\loch\f43 for}{\rtlch\fcs1 \af46\afs20 \ltrch\fcs0 \fs20\insrsid6097384 +\par }{\rtlch\fcs1 \af46\afs30 \ltrch\fcs0 \fs30\insrsid16671749 \hich\af43\dbch\af45\loch\f43 DSpace}{\rtlch\fcs1 \af46\afs30 \ltrch\fcs0 \fs30\insrsid6097384 +\par +\par }\pard \ltrpar\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 {\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \hich\af43\dbch\af45\loch\f43 +This is a text. For the next sixty seconds this software will conduct a test of the DSpace text extraction facility. This is only a text.}{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid6097384 +\par +\par }{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \hich\af43\dbch\af45\loch\f43 This is a paragraph that followed the first that lived in the \hich\af43\dbch\af45\loch\f43 document that Jack built.}{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid6097384 +\par +\par }{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \hich\af43\dbch\af45\loch\f43 Lorem ipsum dolor sit amet. The quick brown fox jumped over the lazy dog. Yow! Are we having fun yet?}{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid6097384 +\par +\par }{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \hich\af43\dbch\af45\loch\f43 This has been a test of the DSpace text extraction system. 
In the event of actual content you would care what is written he\hich\af43\dbch\af45\loch\f43 re.}{\rtlch\fcs1 +\af46 \ltrch\fcs0 \cs16\super\insrsid16671749 \chftn {\footnote \ltrpar \pard\plain \ltrpar\s24\ql \fi-339\li339\ri0\widctlpar\noline\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin339\itap0 \rtlch\fcs1 \af46\afs20\alang1081 \ltrch\fcs0 +\fs20\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 {\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \chftn \tab \hich\af43\dbch\af45\loch\f43 Tip o\hich\f43 \rquote \loch\f43 + the hat to the U.S. Emergency Broadcast System for the format that I have irreverently borrowed.}}}{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid6097384 +\par }{\*\themedata 504b030414000600080000002100e9de0fbfff0000001c020000130000005b436f6e74656e745f54797065735d2e786d6cac91cb4ec3301045f748fc83e52d4a +9cb2400825e982c78ec7a27cc0c8992416c9d8b2a755fbf74cd25442a820166c2cd933f79e3be372bd1f07b5c3989ca74aaff2422b24eb1b475da5df374fd9ad +5689811a183c61a50f98f4babebc2837878049899a52a57be670674cb23d8e90721f90a4d2fa3802cb35762680fd800ecd7551dc18eb899138e3c943d7e503b6 +b01d583deee5f99824e290b4ba3f364eac4a430883b3c092d4eca8f946c916422ecab927f52ea42b89a1cd59c254f919b0e85e6535d135a8de20f20b8c12c3b0 +0c895fcf6720192de6bf3b9e89ecdbd6596cbcdd8eb28e7c365ecc4ec1ff1460f53fe813d3cc7f5b7f020000ffff0300504b030414000600080000002100a5d6 +a7e7c0000000360100000b0000005f72656c732f2e72656c73848fcf6ac3300c87ef85bd83d17d51d2c31825762fa590432fa37d00e1287f68221bdb1bebdb4f +c7060abb0884a4eff7a93dfeae8bf9e194e720169aaa06c3e2433fcb68e1763dbf7f82c985a4a725085b787086a37bdbb55fbc50d1a33ccd311ba548b6309512 +0f88d94fbc52ae4264d1c910d24a45db3462247fa791715fd71f989e19e0364cd3f51652d73760ae8fa8c9ffb3c330cc9e4fc17faf2ce545046e37944c69e462 +a1a82fe353bd90a865aad41ed0b5b8f9d6fd010000ffff0300504b0304140006000800000021006b799616830000008a0000001c0000007468656d652f746865 
+6d652f7468656d654d616e616765722e786d6c0ccc4d0ac3201040e17da17790d93763bb284562b2cbaebbf600439c1a41c7a0d29fdbd7e5e38337cedf14d59b +4b0d592c9c070d8a65cd2e88b7f07c2ca71ba8da481cc52c6ce1c715e6e97818c9b48d13df49c873517d23d59085adb5dd20d6b52bd521ef2cdd5eb9246a3d8b +4757e8d3f729e245eb2b260a0238fd010000ffff0300504b030414000600080000002100b6f4679893070000c9200000160000007468656d652f7468656d652f +7468656d65312e786d6cec59cd8b1bc915bf07f23f347d97f5d5ad8fc1f2a24fcfda33b6b164873dd648a5eef2547789aad28cc56208de532e81c026e49085bd +ed21842cecc22eb9e48f31d8249b3f22afaa5bdd5552c99e191c3061463074977eefd5afde7bf5de53d5ddcf5e26d4bbc05c1096f6fcfa9d9aefe174ce16248d +7afeb3d9a4d2f13d2151ba4094a5b8e76fb0f03fbbf7eb5fdd454732c609f6403e1547a8e7c752ae8eaa5531876124eeb0154ee1bb25e30992f0caa3ea82a34b +d09bd06aa3566b55134452df4b51026a1f2f97648ebd9952e9dfdb2a1f53784da5500373caa74a35b6243476715e5708b11143cabd0b447b3eccb3609733fc52 +fa1e4542c2173dbfa6fffceabdbb5574940b517940d6909be8bf5c2e17589c37f49c3c3a2b260d823068f50bfd1a40e53e6edc1eb7c6ad429f06a0f91c569a71 +b175b61bc320c71aa0ecd1a17bd41e35eb16ded0dfdce3dc0fd5c7c26b50a63fd8c34f2643b0a285d7a00c1feee1c3417730b2f56b50866fede1dbb5fe28685b +fa3528a6243ddf43d7c25673b85d6d0159327aec8477c360d26ee4ca4b144443115d6a8a254be5a1584bd00bc6270050408a24493db959e1259a43140f112567 +9c7827248a21f056286502866b8ddaa4d684ffea13e827ed5174849121ad780113b137a4f87862cec94af6fc07a0d537206f7ffef9cdeb1fdfbcfee9cd575fbd +79fdf77c6eadca923b466964cafdf2dd1ffef3cd6fbd7ffff0ed2f5fff319b7a172f4cfcbbbffdeedd3ffef93ef5b0e2d2146ffff4fdbb1fbf7ffbe7dfffebaf +5f3bb4f7393a33e1339260e13dc297de5396c0021dfcf119bf9ec42c46c494e8a791402952b338f48f656ca11f6d10450edc00db767cce21d5b880f7d72f2cc2 +d398af2571687c182716f094313a60dc6985876a2ec3ccb3751ab927e76b13f714a10bd7dc43945a5e1eaf579063894be530c616cd2714a5124538c5d253dfb1 +738c1dabfb8210cbaea764ce99604be97d41bc01224e93ccc899154da5d03149c02f1b1741f0b7659bd3e7de8051d7aa47f8c246c2de40d4417e86a965c6fb68 
+2d51e252394309350d7e8264ec2239ddf0b9891b0b099e8e3065de78818570c93ce6b05ec3e90f21cdb8dd7e4a37898de4929cbb749e20c64ce4889d0f6394ac +5cd829496313fbb938871045de13265df05366ef10f50e7e40e941773f27d872f787b3c133c8b026a53240d4376beef0e57dccacf89d6ee8126157aae9f3c44a +b17d4e9cd131584756689f604cd1255a60ec3dfbdcc160c05696cd4bd20f62c82ac7d815580f901dabea3dc5027a25d5dcece7c91322ac909de2881de073bad9 +493c1b9426881fd2fc08bc6eda7c0ca52e7105c0633a3f37818f08f480102f4ea33c16a0c308ee835a9fc4c82a60ea5db8e375c32dff5d658fc1be7c61d1b8c2 +be04197c6d1948eca6cc7b6d3343d49aa00c9819822ec3956e41c4727f29a28aab165b3be596f6a62ddd00dd91d5f42424fd6007b4d3fb84ffbbde073a8cb77f +f9c6b10f3e4ebfe3566c25ab6b763a8792c9f14e7f7308b7dbd50c195f904fbfa919a175fa04431dd9cf58b73dcd6d4fe3ffdff73487f6f36d2773a8dfb8ed64 +7ce8306e3b99fc70e5e3743265f3027d8d3af0c80e7af4b14f72f0d46749289dca0dc527421ffc08f83db398c0a092d3279eb838055cc5f0a8ca1c4c60e1228e +b48cc799fc0d91f134462b381daafb4a492472d591f0564cc0a1911e76ea5678ba4e4ed9223becacd7d5c16656590592e5782d2cc6e1a04a66e856bb3cc02bd4 +6bb6913e68dd1250b2d721614c6693683a48b4b783ca48fa58178ce620a157f65158741d2c3a4afdd6557b2c805ae115f8c1edc1cff49e1f06200242701e07cd +f942f92973f5d6bbda991fd3d3878c69450034d8db08283ddd555c0f2e4fad2e0bb52b78da2261849b4d425b46377822869fc17974aad1abd0b8aeafbba54b2d +7aca147a3e08ad9246bbf33e1637f535c8ede6069a9a9982a6de65cf6f35430899395af5fc251c1ac363b282d811ea3717a211dcbccc25cf36fc4d32cb8a0b39 +4222ce0cae934e960d122231f728497abe5a7ee1069aea1ca2b9d51b90103e59725d482b9f1a3970baed64bc5ce2b934dd6e8c284b67af90e1b35ce1fc568bdf +1cac24d91adc3d8d1797de195df3a708422c6cd795011744c0dd413db3e682c0655891c8caf8db294c79da356fa3740c65e388ae62945714339967709dca0b3a +faadb081f196af190c6a98242f8467912ab0a651ad6a5a548d8cc3c1aafb6121653923699635d3ca2aaa6abab39835c3b60cecd8f26645de60b53531e434b3c2 +67a97b37e576b7b96ea74f28aa0418bcb09fa3ea5ea12018d4cac92c6a8af17e1a56393b1fb56bc776811fa07695226164fdd656ed8edd8a1ae19c0e066f54f9 
+416e376a6168b9ed2bb5a5f5adb979b1cdce5e40f2184197bba6526857c2c92e47d0104d754f92a50dd8222f65be35e0c95b73d2f3bfac85fd60d80887955a27 +1c57826650ab74c27eb3d20fc3667d1cd66ba341e31514161927f530bbb19fc00506dde4f7f67a7cefee3ed9ded1dc99b3a4caf4dd7c5513d777f7f5c6e1bb7b +8f40d2f9b2d598749bdd41abd26df627956034e854bac3d6a0326a0ddba3c9681876ba9357be77a1c141bf390c5ae34ea5551f0e2b41aba6e877ba9576d068f4 +8376bf330efaaff23606569ea58fdc16605ecdebde7f010000ffff0300504b0304140006000800000021000dd1909fb60000001b010000270000007468656d65 +2f7468656d652f5f72656c732f7468656d654d616e616765722e786d6c2e72656c73848f4d0ac2301484f78277086f6fd3ba109126dd88d0add40384e4350d36 +3f2451eced0dae2c082e8761be9969bb979dc9136332de3168aa1a083ae995719ac16db8ec8e4052164e89d93b64b060828e6f37ed1567914b284d262452282e +3198720e274a939cd08a54f980ae38a38f56e422a3a641c8bbd048f7757da0f19b017cc524bd62107bd5001996509affb3fd381a89672f1f165dfe514173d985 +0528a2c6cce0239baa4c04ca5bbabac4df000000ffff0300504b01022d0014000600080000002100e9de0fbfff0000001c020000130000000000000000000000 +0000000000005b436f6e74656e745f54797065735d2e786d6c504b01022d0014000600080000002100a5d6a7e7c0000000360100000b00000000000000000000 +000000300100005f72656c732f2e72656c73504b01022d00140006000800000021006b799616830000008a0000001c0000000000000000000000000019020000 +7468656d652f7468656d652f7468656d654d616e616765722e786d6c504b01022d0014000600080000002100b6f4679893070000c92000001600000000000000 +000000000000d60200007468656d652f7468656d652f7468656d65312e786d6c504b01022d00140006000800000021000dd1909fb60000001b01000027000000 +000000000000000000009d0a00007468656d652f7468656d652f5f72656c732f7468656d654d616e616765722e786d6c2e72656c73504b050600000000050005005d010000980b00000000} +{\*\colorschememapping 3c3f786d6c2076657273696f6e3d22312e302220656e636f64696e673d225554462d3822207374616e64616c6f6e653d22796573223f3e0d0a3c613a636c724d +617020786d6c6e733a613d22687474703a2f2f736368656d61732e6f70656e786d6c666f726d6174732e6f72672f64726177696e676d6c2f323030362f6d6169 
+6e22206267313d226c743122207478313d22646b3122206267323d226c743222207478323d22646b322220616363656e74313d22616363656e74312220616363 +656e74323d22616363656e74322220616363656e74333d22616363656e74332220616363656e74343d22616363656e74342220616363656e74353d22616363656e74352220616363656e74363d22616363656e74362220686c696e6b3d22686c696e6b2220666f6c486c696e6b3d22666f6c486c696e6b222f3e} +{\*\latentstyles\lsdstimax376\lsdlockeddef0\lsdsemihiddendef0\lsdunhideuseddef0\lsdqformatdef0\lsdprioritydef99{\lsdlockedexcept \lsdqformat1 \lsdpriority0 \lsdlocked0 Normal;\lsdqformat1 \lsdpriority9 \lsdlocked0 heading 1; +\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 2;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 3;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 4; +\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 5;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 6;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 7; +\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 8;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 9;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 1; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 5; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 6;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 7;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 8;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 9; +\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 1;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 2;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 3; +\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 
4;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 5;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 6; +\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 7;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 8;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 9;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Normal Indent; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 footnote text;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 annotation text;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 header;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 footer; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index heading;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority35 \lsdlocked0 caption;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 table of figures; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 envelope address;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 envelope return;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 footnote reference;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 annotation reference; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 line number;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 page number;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 endnote reference;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 endnote text; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 table of authorities;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 macro;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 toa heading;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Bullet;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Number;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List 3; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Bullet 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Bullet 3; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 
List Bullet 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Bullet 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Number 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Number 3; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Number 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Number 5;\lsdqformat1 \lsdpriority10 \lsdlocked0 Title;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Closing; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Signature;\lsdsemihidden1 \lsdunhideused1 \lsdpriority1 \lsdlocked0 Default Paragraph Font;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text Indent; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Continue;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Continue 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Continue 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Continue 4; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Continue 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Message Header;\lsdqformat1 \lsdpriority11 \lsdlocked0 Subtitle;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Salutation; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Date;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text First Indent;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text First Indent 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Note Heading; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text Indent 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text Indent 3; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Block Text;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Hyperlink;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 FollowedHyperlink;\lsdqformat1 \lsdpriority22 \lsdlocked0 Strong; +\lsdqformat1 \lsdpriority20 \lsdlocked0 Emphasis;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Document Map;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Plain 
Text;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 E-mail Signature; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Top of Form;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Bottom of Form;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Normal (Web);\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Acronym; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Address;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Cite;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Code;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Definition; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Keyboard;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Preformatted;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Sample;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Typewriter; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Variable;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Normal Table;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 annotation subject;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 No List; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Outline List 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Outline List 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Outline List 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Simple 1; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Simple 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Simple 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Classic 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Classic 2; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Classic 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Classic 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Colorful 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Colorful 2; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Colorful 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Columns 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Columns 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Columns 3; +\lsdsemihidden1 
\lsdunhideused1 \lsdlocked0 Table Columns 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Columns 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 2; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 6; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 7;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 8;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 2; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 6; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 7;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 8;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table 3D effects 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table 3D effects 2; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table 3D effects 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Contemporary;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Elegant;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Professional; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Subtle 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Subtle 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Web 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Web 2; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Web 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Balloon Text;\lsdpriority39 \lsdlocked0 Table Grid;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Theme;\lsdsemihidden1 \lsdlocked0 Placeholder Text; +\lsdqformat1 \lsdpriority1 \lsdlocked0 No Spacing;\lsdpriority60 \lsdlocked0 Light Shading;\lsdpriority61 
\lsdlocked0 Light List;\lsdpriority62 \lsdlocked0 Light Grid;\lsdpriority63 \lsdlocked0 Medium Shading 1;\lsdpriority64 \lsdlocked0 Medium Shading 2; +\lsdpriority65 \lsdlocked0 Medium List 1;\lsdpriority66 \lsdlocked0 Medium List 2;\lsdpriority67 \lsdlocked0 Medium Grid 1;\lsdpriority68 \lsdlocked0 Medium Grid 2;\lsdpriority69 \lsdlocked0 Medium Grid 3;\lsdpriority70 \lsdlocked0 Dark List; +\lsdpriority71 \lsdlocked0 Colorful Shading;\lsdpriority72 \lsdlocked0 Colorful List;\lsdpriority73 \lsdlocked0 Colorful Grid;\lsdpriority60 \lsdlocked0 Light Shading Accent 1;\lsdpriority61 \lsdlocked0 Light List Accent 1; +\lsdpriority62 \lsdlocked0 Light Grid Accent 1;\lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 1;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 1;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 1;\lsdsemihidden1 \lsdlocked0 Revision; +\lsdqformat1 \lsdpriority34 \lsdlocked0 List Paragraph;\lsdqformat1 \lsdpriority29 \lsdlocked0 Quote;\lsdqformat1 \lsdpriority30 \lsdlocked0 Intense Quote;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 1;\lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 1; +\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 1;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 1;\lsdpriority70 \lsdlocked0 Dark List Accent 1;\lsdpriority71 \lsdlocked0 Colorful Shading Accent 1;\lsdpriority72 \lsdlocked0 Colorful List Accent 1; +\lsdpriority73 \lsdlocked0 Colorful Grid Accent 1;\lsdpriority60 \lsdlocked0 Light Shading Accent 2;\lsdpriority61 \lsdlocked0 Light List Accent 2;\lsdpriority62 \lsdlocked0 Light Grid Accent 2;\lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 2; +\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 2;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 2;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 2;\lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 2;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 2; +\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 2;\lsdpriority70 \lsdlocked0 Dark List Accent 2;\lsdpriority71 \lsdlocked0 
Colorful Shading Accent 2;\lsdpriority72 \lsdlocked0 Colorful List Accent 2;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 2; +\lsdpriority60 \lsdlocked0 Light Shading Accent 3;\lsdpriority61 \lsdlocked0 Light List Accent 3;\lsdpriority62 \lsdlocked0 Light Grid Accent 3;\lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 3;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 3; +\lsdpriority65 \lsdlocked0 Medium List 1 Accent 3;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 3;\lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 3;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 3;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 3; +\lsdpriority70 \lsdlocked0 Dark List Accent 3;\lsdpriority71 \lsdlocked0 Colorful Shading Accent 3;\lsdpriority72 \lsdlocked0 Colorful List Accent 3;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 3;\lsdpriority60 \lsdlocked0 Light Shading Accent 4; +\lsdpriority61 \lsdlocked0 Light List Accent 4;\lsdpriority62 \lsdlocked0 Light Grid Accent 4;\lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 4;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 4;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 4; +\lsdpriority66 \lsdlocked0 Medium List 2 Accent 4;\lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 4;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 4;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 4;\lsdpriority70 \lsdlocked0 Dark List Accent 4; +\lsdpriority71 \lsdlocked0 Colorful Shading Accent 4;\lsdpriority72 \lsdlocked0 Colorful List Accent 4;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 4;\lsdpriority60 \lsdlocked0 Light Shading Accent 5;\lsdpriority61 \lsdlocked0 Light List Accent 5; +\lsdpriority62 \lsdlocked0 Light Grid Accent 5;\lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 5;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 5;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 5;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 5; +\lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 5;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 
5;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 5;\lsdpriority70 \lsdlocked0 Dark List Accent 5;\lsdpriority71 \lsdlocked0 Colorful Shading Accent 5; +\lsdpriority72 \lsdlocked0 Colorful List Accent 5;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 5;\lsdpriority60 \lsdlocked0 Light Shading Accent 6;\lsdpriority61 \lsdlocked0 Light List Accent 6;\lsdpriority62 \lsdlocked0 Light Grid Accent 6; +\lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 6;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 6;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 6;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 6; +\lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 6;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 6;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 6;\lsdpriority70 \lsdlocked0 Dark List Accent 6;\lsdpriority71 \lsdlocked0 Colorful Shading Accent 6; +\lsdpriority72 \lsdlocked0 Colorful List Accent 6;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 6;\lsdqformat1 \lsdpriority19 \lsdlocked0 Subtle Emphasis;\lsdqformat1 \lsdpriority21 \lsdlocked0 Intense Emphasis; +\lsdqformat1 \lsdpriority31 \lsdlocked0 Subtle Reference;\lsdqformat1 \lsdpriority32 \lsdlocked0 Intense Reference;\lsdqformat1 \lsdpriority33 \lsdlocked0 Book Title;\lsdsemihidden1 \lsdunhideused1 \lsdpriority37 \lsdlocked0 Bibliography; +\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority39 \lsdlocked0 TOC Heading;\lsdpriority41 \lsdlocked0 Plain Table 1;\lsdpriority42 \lsdlocked0 Plain Table 2;\lsdpriority43 \lsdlocked0 Plain Table 3;\lsdpriority44 \lsdlocked0 Plain Table 4; +\lsdpriority45 \lsdlocked0 Plain Table 5;\lsdpriority40 \lsdlocked0 Grid Table Light;\lsdpriority46 \lsdlocked0 Grid Table 1 Light;\lsdpriority47 \lsdlocked0 Grid Table 2;\lsdpriority48 \lsdlocked0 Grid Table 3;\lsdpriority49 \lsdlocked0 Grid Table 4; +\lsdpriority50 \lsdlocked0 Grid Table 5 Dark;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful;\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful;\lsdpriority46 \lsdlocked0 Grid Table 1 
Light Accent 1;\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 1; +\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 1;\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 1;\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 1;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 1; +\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 1;\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 2;\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 2;\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 2; +\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 2;\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 2;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 2;\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 2; +\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 3;\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 3;\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 3;\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 3; +\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 3;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 3;\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 3;\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 4; +\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 4;\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 4;\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 4;\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 4; +\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 4;\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 4;\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 5;\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 5; +\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 5;\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 5;\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 5;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 5; +\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 5;\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 6;\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 6;\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 6; 
+\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 6;\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 6;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 6;\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 6; +\lsdpriority46 \lsdlocked0 List Table 1 Light;\lsdpriority47 \lsdlocked0 List Table 2;\lsdpriority48 \lsdlocked0 List Table 3;\lsdpriority49 \lsdlocked0 List Table 4;\lsdpriority50 \lsdlocked0 List Table 5 Dark; +\lsdpriority51 \lsdlocked0 List Table 6 Colorful;\lsdpriority52 \lsdlocked0 List Table 7 Colorful;\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 1;\lsdpriority47 \lsdlocked0 List Table 2 Accent 1;\lsdpriority48 \lsdlocked0 List Table 3 Accent 1; +\lsdpriority49 \lsdlocked0 List Table 4 Accent 1;\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 1;\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 1;\lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 1; +\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 2;\lsdpriority47 \lsdlocked0 List Table 2 Accent 2;\lsdpriority48 \lsdlocked0 List Table 3 Accent 2;\lsdpriority49 \lsdlocked0 List Table 4 Accent 2; +\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 2;\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 2;\lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 2;\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 3; +\lsdpriority47 \lsdlocked0 List Table 2 Accent 3;\lsdpriority48 \lsdlocked0 List Table 3 Accent 3;\lsdpriority49 \lsdlocked0 List Table 4 Accent 3;\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 3; +\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 3;\lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 3;\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 4;\lsdpriority47 \lsdlocked0 List Table 2 Accent 4; +\lsdpriority48 \lsdlocked0 List Table 3 Accent 4;\lsdpriority49 \lsdlocked0 List Table 4 Accent 4;\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 4;\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 4; +\lsdpriority52 
\lsdlocked0 List Table 7 Colorful Accent 4;\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 5;\lsdpriority47 \lsdlocked0 List Table 2 Accent 5;\lsdpriority48 \lsdlocked0 List Table 3 Accent 5; +\lsdpriority49 \lsdlocked0 List Table 4 Accent 5;\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 5;\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 5;\lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 5; +\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 6;\lsdpriority47 \lsdlocked0 List Table 2 Accent 6;\lsdpriority48 \lsdlocked0 List Table 3 Accent 6;\lsdpriority49 \lsdlocked0 List Table 4 Accent 6; +\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 6;\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 6;\lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 6;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Mention; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Smart Hyperlink;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Hashtag;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Unresolved Mention;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Smart Link;}}{\*\datastore 01050000 +02000000180000004d73786d6c322e534158584d4c5265616465722e362e3000000000000000000000060000 +d0cf11e0a1b11ae1000000000000000000000000000000003e000300feff090006000000000000000000000001000000010000000000000000100000feffffff00000000feffffff0000000000000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff 
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +fffffffffffffffffdfffffffeffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +ffffffffffffffffffffffffffffffff52006f006f007400200045006e00740072007900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000016000500ffffffffffffffffffffffff0c6ad98892f1d411a65f0040963251e5000000000000000000000000d0af +77916744d801feffffff00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffff00000000000000000000000000000000000000000000000000000000 
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffff0000000000000000000000000000000000000000000000000000 +000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffff000000000000000000000000000000000000000000000000 +0000000000000000000000000000000000000000000000000105000000000000}} \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.txt b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.txt new file mode 100644 index 0000000000..edd9160b1d --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.txt @@ -0,0 +1,13 @@ +A Text Extraction Test Document +for +DSpace + +This is a text. For the next sixty seconds this software will conduct a test of the DSpace text extraction facility. This is only a text. + +This is a paragraph that followed the first that lived in the document that Jack built. + +Lorem ipsum dolor sit amet. The quick brown fox jumped over the lazy dog. Yow! Are we having fun yet? + +This has been a test of the DSpace text extraction system. In the event of actual content you would care what is written here. + +Tip o’ the hat to the U.S. Emergency Broadcast System for the format that I have irreverently borrowed. 
\ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.xls b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.xls new file mode 100644 index 0000000000..1ebc20bc38 Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.xls differ diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.xlsx b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.xlsx new file mode 100644 index 0000000000..47e0f7387f Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.xlsx differ diff --git a/dspace-api/src/test/resources/org/dspace/app/orcid-works/work-277871.xml b/dspace-api/src/test/resources/org/dspace/app/orcid-works/work-277871.xml new file mode 100644 index 0000000000..f5fd30fa13 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/orcid-works/work-277871.xml @@ -0,0 +1,31 @@ + + + 2014-01-22T19:11:57.151Z + 2015-06-19T19:14:25.924Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + Branch artery occlusion in a young woman. + + + formatted-unspecified + Gittinger JW, Miller NR, Keltner JL, Burde RM. Branch artery occlusion in a young woman. Surv Ophthalmol. 1985 Jul-Aug; 30(1):52-8. 
+ + journal-article + + 1985 + 07 + 01 + + \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/orcid-works/work-277902.xml b/dspace-api/src/test/resources/org/dspace/app/orcid-works/work-277902.xml new file mode 100644 index 0000000000..aeab728543 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/orcid-works/work-277902.xml @@ -0,0 +1,54 @@ + + + 2014-01-22T19:11:57.159Z + 2015-06-19T19:14:26.327Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + Another cautionary tale. + + Journal title + Short description + journal-article + + 2011 + 05 + 01 + + + + Walter White + walter@test.com + + first + author + + + + John White + john@test.com + + additional + author + + + + Jesse Pinkman + + first + editor + + + + \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/orcid-works/work-277904.xml b/dspace-api/src/test/resources/org/dspace/app/orcid-works/work-277904.xml new file mode 100644 index 0000000000..980daa490e --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/orcid-works/work-277904.xml @@ -0,0 +1,62 @@ + + + 2014-01-22T19:11:57.160Z + 2015-06-19T19:14:26.350Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + The elements of style and the survey of ophthalmology. 
+ + + bibtex + @article{Test, + doi = {10.11234.12}, + year = 2011, + month = {nov}, + publisher = {Elsevier {BV}}, + volume = {110}, + pages = {71--83}, + author = {Walter White}, + title = {Title from Bibtex: The elements of style and the survey of ophthalmology.}, + journal = {Test Journal} + } + + + invention + + + agr + work:external-identifier-id + http://orcid.org + version-of + + + doi + 10.11234.12 + http://orcid.org + self + + + + + Walter White + walter@test.com + + first + author + + + + it + \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/orcid-works/workBulk-277904-277902-277871.xml b/dspace-api/src/test/resources/org/dspace/app/orcid-works/workBulk-277904-277902-277871.xml new file mode 100644 index 0000000000..97d39dcf41 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/orcid-works/workBulk-277904-277902-277871.xml @@ -0,0 +1,147 @@ + + + + 2014-01-22T19:11:57.160Z + 2015-06-19T19:14:26.350Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + The elements of style and the survey of ophthalmology. 
+ + + bibtex + @article{Test, + doi = {10.11234.12}, + year = 2011, + month = {nov}, + publisher = {Elsevier {BV}}, + volume = {110}, + pages = {71--83}, + author = {Walter White}, + title = {Title from Bibtex: The elements of style and the survey of ophthalmology.}, + journal = {Test Journal} + } + + + invention + + + agr + work:external-identifier-id + http://orcid.org + version-of + + + doi + 10.11234.12 + http://orcid.org + self + + + + + Walter White + walter@test.com + + first + author + + + + it + + + 2014-01-22T19:11:57.159Z + 2015-06-19T19:14:26.327Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + Another cautionary tale. + + Journal title + Short description + journal-article + + 2011 + 05 + 01 + + + + Walter White + walter@test.com + + first + author + + + + John White + john@test.com + + additional + author + + + + Jesse Pinkman + + first + editor + + + + + + 2014-01-22T19:11:57.151Z + 2015-06-19T19:14:25.924Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + Branch artery occlusion in a young woman. + + + formatted-unspecified + Gittinger JW, Miller NR, Keltner JL, Burde RM. Branch artery occlusion in a young woman. Surv Ophthalmol. 1985 Jul-Aug; 30(1):52-8. 
+ + journal-article + + 1985 + 07 + 01 + + + \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/orcid-works/workBulk-277904-277902.xml b/dspace-api/src/test/resources/org/dspace/app/orcid-works/workBulk-277904-277902.xml new file mode 100644 index 0000000000..6c9d0d7db6 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/orcid-works/workBulk-277904-277902.xml @@ -0,0 +1,117 @@ + + + + 2014-01-22T19:11:57.160Z + 2015-06-19T19:14:26.350Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + The elements of style and the survey of ophthalmology. + + + bibtex + @article{Test, + doi = {10.11234.12}, + year = 2011, + month = {nov}, + publisher = {Elsevier {BV}}, + volume = {110}, + pages = {71--83}, + author = {Walter White}, + title = {Title from Bibtex: The elements of style and the survey of ophthalmology.}, + journal = {Test Journal} + } + + + invention + + + agr + work:external-identifier-id + http://orcid.org + version-of + + + doi + 10.11234.12 + http://orcid.org + self + + + + + Walter White + walter@test.com + + first + author + + + + it + + + 2014-01-22T19:11:57.159Z + 2015-06-19T19:14:26.327Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + Another cautionary tale. 
+ + Journal title + Short description + journal-article + + 2011 + 05 + 01 + + + + Walter White + walter@test.com + + first + author + + + + John White + john@test.com + + additional + author + + + + Jesse Pinkman + + first + editor + + + + + \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/orcid-works/works.xml b/dspace-api/src/test/resources/org/dspace/app/orcid-works/works.xml new file mode 100644 index 0000000000..411160ef8e --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/orcid-works/works.xml @@ -0,0 +1,196 @@ + + + 2015-06-19T19:14:26.350Z + + 2015-06-19T19:14:26.350Z + + + 2014-01-22T19:11:57.160Z + 2015-06-19T19:14:26.350Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + The elements of style and the survey of ophthalmology. + + invention + + 2012 + 11 + 01 + + + + + 2015-06-19T19:14:26.339Z + + + 2014-01-22T19:11:57.159Z + 2015-06-19T19:14:26.339Z + + + https://sandbox.orcid.org/client/DSPACE-CLIENT-ID + DSPACE-CLIENT-ID + sandbox.orcid.org + + DSPACE-CRIS + + + Introduction. + + journal-article + + 2011 + 11 + 01 + + + + + 2015-06-19T19:14:26.327Z + + + 2014-01-22T19:11:57.159Z + 2015-06-19T19:14:26.327Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + Another cautionary tale. + + journal-article + + 2011 + 05 + 01 + + + + 2014-01-22T19:11:57.159Z + 2015-06-19T19:14:26.327Z + + + https://sandbox.orcid.org/client/4Science + 4Science + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + Another cautionary tale (4Science). 
+ + journal-article + + 2011 + 05 + 01 + + + + + 2015-06-19T19:14:26.108Z + + + 2014-01-22T19:11:57.155Z + 2015-06-19T19:14:26.108Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/DSPACE-CLIENT-ID + DSPACE-CLIENT-ID + sandbox.orcid.org + + DSPACE-CRIS + + + Functional hemianopsia: a historical perspective. + + journal-article + + 1988 + 05 + 01 + + + + 2014-01-22T19:11:57.151Z + 2015-06-19T19:14:25.924Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + Branch artery occlusion in a young man. + + journal-article + + 1985 + 07 + 01 + + + + + 2015-06-19T19:14:26.108Z + + + 2014-01-22T19:11:57.151Z + 2015-06-19T19:14:25.924Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + Branch artery occlusion in a young woman. 
+ + journal-article + + 1985 + 07 + 01 + + + + \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/sherpa/0000-0000.json b/dspace-api/src/test/resources/org/dspace/app/sherpa/0000-0000.json new file mode 100644 index 0000000000..3b9e474502 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/sherpa/0000-0000.json @@ -0,0 +1,3 @@ +{ + "items": [] +} \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/sherpa/2731-0582.json b/dspace-api/src/test/resources/org/dspace/app/sherpa/2731-0582.json new file mode 100644 index 0000000000..2e5c7e2db9 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/sherpa/2731-0582.json @@ -0,0 +1,504 @@ +{ + "items": [ + { + "system_metadata": { + "id": 40863, + "uri": "https://v2.sherpa.ac.uk/id/publication/40863", + "date_modified": "2022-03-25 14:08:29", + "publicly_visible": "yes", + "publicly_visible_phrases": [ + { + "language": "en", + "phrase": "Yes", + "value": "yes" + } + ], + "date_created": "2022-01-11 09:43:53" + }, + "tj_status_phrases": [ + { + "phrase": "Plan S Approved", + "value": "plan_s_approved", + "language": "en" + } + ], + "type_phrases": [ + { + "value": "journal", + "phrase": "Journal", + "language": "en" + } + ], + "id": 40863, + "issns": [ + { + "issn": "2731-0582" + } + ], + "publishers": [ + { + "relationship_type": "commercial_publisher", + "relationship_type_phrases": [ + { + "value": "commercial_publisher", + "phrase": "Commercial Publisher", + "language": "en" + } + ], + "publisher": { + "id": 3286, + "name": [ + { + "name": "Nature Research", + "language": "en", + "preferred_phrases": [ + { + "language": "en", + "phrase": "Name", + "value": "name" + } + ], + "preferred": "name", + "language_phrases": [ + { + "phrase": "English", + "value": "en", + "language": "en" + } + ] + } + ], + "imprint_of_id": 62037, + "country": "gb", + "country_phrases": [ + { + "value": "gb", + "phrase": "United Kingdom", + "language": 
"en" + } + ], + "publication_count": 87, + "uri": "https://v2.sherpa.ac.uk/id/publisher/3286", + "url": "https://www.nature.com/" + } + } + ], + "listed_in_doaj_phrases": [ + { + "language": "en", + "phrase": "No", + "value": "no" + } + ], + "listed_in_doaj": "no", + "tj_status": [ + "plan_s_approved" + ], + "publisher_policy": [ + { + "open_access_prohibited": "no", + "id": 3286, + "publication_count": 36, + "internal_moniker": "Default Policy", + "urls": [ + { + "description": "Self archiving and license to publish", + "url": "https://www.nature.com/neuro/editorial-policies/self-archiving-and-license-to-publish" + }, + { + "description": "Preprints and Conference Proceedings", + "url": "https://www.nature.com/nature-portfolio/editorial-policies/preprints-and-conference-proceedings" + }, + { + "url": "https://www.springernature.com/gp/open-research/policies/accepted-manuscript-terms", + "description": "Accepted manuscript terms of use" + } + ], + "open_access_prohibited_phrases": [ + { + "value": "no", + "phrase": "No", + "language": "en" + } + ], + "uri": "https://v2.sherpa.ac.uk/id/publisher_policy/3286", + "permitted_oa": [ + { + "prerequisites": { + "prerequisites_phrases": [ + { + "language": "en", + "value": "when_research_article", + "phrase": "If a Research Article" + } + ], + "prerequisites": [ + "when_research_article" + ] + }, + "copyright_owner": "authors", + "additional_oa_fee_phrases": [ + { + "language": "en", + "value": "no", + "phrase": "No" + } + ], + "article_version_phrases": [ + { + "language": "en", + "value": "submitted", + "phrase": "Submitted" + } + ], + "additional_oa_fee": "no", + "copyright_owner_phrases": [ + { + "language": "en", + "value": "authors", + "phrase": "Authors" + } + ], + "article_version": [ + "submitted" + ], + "location": { + "location_phrases": [ + { + "value": "authors_homepage", + "phrase": "Author's Homepage", + "language": "en" + }, + { + "language": "en", + "phrase": "Funder Designated Location", + "value": 
"funder_designated_location" + }, + { + "language": "en", + "value": "institutional_repository", + "phrase": "Institutional Repository" + }, + { + "phrase": "Preprint Repository", + "value": "preprint_repository", + "language": "en" + } + ], + "location": [ + "authors_homepage", + "funder_designated_location", + "institutional_repository", + "preprint_repository" + ] + }, + "conditions": [ + "Must link to publisher version", + "Upon publication, source must be acknowledged and DOI cited", + "Post-prints are subject to Springer Nature re-use terms", + "Non-commercial use only" + ] + }, + { + "embargo": { + "units": "months", + "amount": 6, + "units_phrases": [ + { + "phrase": "Months", + "value": "months", + "language": "en" + } + ] + }, + "license": [ + { + "license_phrases": [ + { + "phrase": "Publisher's Bespoke License", + "value": "bespoke_license", + "language": "en" + } + ], + "license": "bespoke_license" + } + ], + "article_version_phrases": [ + { + "value": "accepted", + "phrase": "Accepted", + "language": "en" + } + ], + "additional_oa_fee": "no", + "conditions": [ + "Must link to publisher version", + "Published source must be acknowledged and DOI cited", + "Post-prints are subject to Springer Nature re-use terms", + "Non-commercial use only" + ], + "copyright_owner_phrases": [ + { + "phrase": "Authors", + "value": "authors", + "language": "en" + } + ], + "location": { + "location": [ + "authors_homepage", + "funder_designated_location", + "institutional_repository", + "named_repository" + ], + "location_phrases": [ + { + "phrase": "Author's Homepage", + "value": "authors_homepage", + "language": "en" + }, + { + "phrase": "Funder Designated Location", + "value": "funder_designated_location", + "language": "en" + }, + { + "language": "en", + "value": "institutional_repository", + "phrase": "Institutional Repository" + }, + { + "language": "en", + "value": "named_repository", + "phrase": "Named Repository" + } + ], + "named_repository": [ + "PubMed 
Central", + "Europe PMC" + ] + }, + "article_version": [ + "accepted" + ], + "prerequisites": { + "prerequisites": [ + "when_research_article" + ], + "prerequisites_phrases": [ + { + "value": "when_research_article", + "phrase": "If a Research Article", + "language": "en" + } + ] + }, + "copyright_owner": "authors", + "additional_oa_fee_phrases": [ + { + "language": "en", + "value": "no", + "phrase": "No" + } + ] + } + ] + }, + { + "id": 4410, + "open_access_prohibited": "no", + "urls": [ + { + "url": "https://www.springernature.com/gp/open-research/about/the-fundamentals-of-open-access-and-open-research", + "description": "The fundamentals of open access and open research" + }, + { + "url": "https://www.nature.com/neuro/editorial-policies/self-archiving-and-license-to-publish", + "description": "Self archiving and license to publish" + }, + { + "url": "https://www.springernature.com/gp/open-research/policies/journal-policies", + "description": "Open access policies for journals" + } + ], + "open_access_prohibited_phrases": [ + { + "language": "en", + "phrase": "No", + "value": "no" + } + ], + "internal_moniker": "Open Access", + "publication_count": 34, + "permitted_oa": [ + { + "additional_oa_fee_phrases": [ + { + "language": "en", + "phrase": "Yes", + "value": "yes" + } + ], + "copyright_owner": "authors", + "conditions": [ + "Published source must be acknowledged with citation" + ], + "article_version": [ + "published" + ], + "copyright_owner_phrases": [ + { + "language": "en", + "value": "authors", + "phrase": "Authors" + } + ], + "location": { + "location_phrases": [ + { + "phrase": "Any Website", + "value": "any_website", + "language": "en" + }, + { + "language": "en", + "phrase": "Journal Website", + "value": "this_journal" + } + ], + "location": [ + "any_website", + "this_journal" + ] + }, + "additional_oa_fee": "yes", + "article_version_phrases": [ + { + "phrase": "Published", + "value": "published", + "language": "en" + } + ], + "license": [ + { + 
"license_phrases": [ + { + "phrase": "CC BY", + "value": "cc_by", + "language": "en" + } + ], + "license": "cc_by", + "version": "4.0" + } + ], + "publisher_deposit": [ + { + "repository_metadata": { + "type_phrases": [ + { + "language": "en", + "value": "disciplinary", + "phrase": "Disciplinary" + } + ], + "notes": "Launched as UK PubMed Central (UKPMC) in January 2007, changed to Europe PubMed Central in November 2012.\r\nSpecial item types include: Links", + "url": "http://europepmc.org/", + "type": "disciplinary", + "name": [ + { + "name": "Europe PMC", + "language": "en", + "preferred": "name", + "language_phrases": [ + { + "value": "en", + "phrase": "English", + "language": "en" + } + ], + "preferred_phrases": [ + { + "language": "en", + "phrase": "Name", + "value": "name" + } + ] + } + ] + }, + "system_metadata": { + "id": 908, + "uri": "https://v2.sherpa.ac.uk/id/repository/908" + } + }, + { + "system_metadata": { + "id": 267, + "uri": "https://v2.sherpa.ac.uk/id/repository/267" + }, + "repository_metadata": { + "type_phrases": [ + { + "language": "en", + "phrase": "Disciplinary", + "value": "disciplinary" + } + ], + "type": "disciplinary", + "url": "http://www.ncbi.nlm.nih.gov/pmc/", + "name": [ + { + "language": "en", + "name": "PubMed Central", + "preferred": "name", + "language_phrases": [ + { + "language": "en", + "value": "en", + "phrase": "English" + } + ], + "preferred_phrases": [ + { + "language": "en", + "value": "name", + "phrase": "Name" + } + ] + } + ] + } + } + ] + } + ], + "uri": "https://v2.sherpa.ac.uk/id/publisher_policy/4410" + } + ], + "title": [ + { + "preferred_phrases": [ + { + "language": "en", + "phrase": "Title", + "value": "name" + } + ], + "language_phrases": [ + { + "language": "en", + "value": "en", + "phrase": "English" + } + ], + "preferred": "name", + "title": "Nature Synthesis", + "language": "en" + } + ], + "type": "journal", + "url": "https://www.nature.com/natsynth/" + } + ] +} \ No newline at end of file diff --git 
a/dspace-api/src/test/resources/test-config.properties b/dspace-api/src/test/resources/test-config.properties index 66a29ab9a0..06322d4a7e 100644 --- a/dspace-api/src/test/resources/test-config.properties +++ b/dspace-api/src/test/resources/test-config.properties @@ -12,4 +12,4 @@ test.folder = ./target/testing/ # Path of the test bitstream (to use in BitstreamTest and elsewhere) test.bitstream = ./target/testing/dspace/assetstore/ConstitutionofIreland.pdf test.exportcsv = ./target/testing/dspace/assetstore/test.csv -test.importcsv = ./target/testing/dspace/assetstore/testImport.csv +test.importcsv = ./target/testing/dspace/assetstore/testImport.csv \ No newline at end of file diff --git a/dspace-iiif/pom.xml b/dspace-iiif/pom.xml index ce1dc8a326..879fb1da69 100644 --- a/dspace-iiif/pom.xml +++ b/dspace-iiif/pom.xml @@ -15,7 +15,7 @@ org.dspace dspace-parent - 7.2 + 7.5-SNAPSHOT .. @@ -45,11 +45,25 @@ org.springframework.boot spring-boot-starter-web ${spring-boot.version} + + + + org.hibernate.validator + hibernate-validator + + org.springframework.boot spring-boot-starter-data-rest ${spring-boot.version} + + + + com.fasterxml.jackson.datatype + jackson-datatype-jdk8 + + org.springframework.boot @@ -66,13 +80,12 @@ javax.cache cache-api - 1.1.0 org.ehcache ehcache - 3.4.0 + ${ehcache.version} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/CanvasService.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/CanvasService.java index 189e4d6f62..dcfb707d62 100644 --- a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/CanvasService.java +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/CanvasService.java @@ -78,29 +78,45 @@ public class CanvasService extends AbstractResourceService { } /** - * Checks for bitstream iiif.image.width metadata in the first - * bitstream in first IIIF bundle. If bitstream metadata is not - * found, use the IIIF image service to update the default canvas - * dimensions for this request. 
Called once for each manifest. + * Checks for "iiif.image.width" metadata in IIIF bundles. When bitstream + * metadata is not found for the first image in the bundle this method updates the + * default canvas dimensions for the request based on the actual image dimensions, + * using the IIIF image service. Called once for each manifest. * @param bundles IIIF bundles for this item */ - protected void guessCanvasDimensions(List bundles) { - Bitstream firstBistream = bundles.get(0).getBitstreams().get(0); - if (!utils.hasWidthMetadata(firstBistream)) { - int[] imageDims = utils.getImageDimensions(firstBistream); - if (imageDims != null && imageDims.length == 2) { - // update the fallback dimensions - defaultCanvasWidthFallback = imageDims[0]; - defaultCanvasHeightFallback = imageDims[1]; + protected void guessCanvasDimensions(Context context, List bundles) { + // prevent redundant updates. + boolean dimensionUpdated = false; + + for (Bundle bundle : bundles) { + if (!dimensionUpdated) { + for (Bitstream bitstream : bundle.getBitstreams()) { + if (utils.isIIIFBitstream(context, bitstream)) { + // check for width dimension + if (!utils.hasWidthMetadata(bitstream)) { + // get the dimensions of the image. + int[] imageDims = utils.getImageDimensions(bitstream); + if (imageDims != null && imageDims.length == 2) { + // update the fallback dimensions + defaultCanvasWidthFallback = imageDims[0]; + defaultCanvasHeightFallback = imageDims[1]; + } + setDefaultCanvasDimensions(); + // stop processing the bundles + dimensionUpdated = true; + } + // check only the first image + break; + } + } } - setDefaultCanvasDimensions(); } } /** - * Used to set the height and width dimensions for all images when iiif.image.default-width and - * iiif.image.default-height are set to -1 in DSpace configuration. - * The values are updated only if the bitstream does not have its own iiif.image.width metadata. 
+ * Sets the height and width dimensions for all images when "iiif.image.default-width" + * and "iiif.image.default-height" are set to -1 in DSpace configuration. The values + * are updated only when the bitstream does not have its own image dimension metadata. * @param bitstream */ private void setCanvasDimensions(Bitstream bitstream) { diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/ManifestService.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/ManifestService.java index a9611593d9..09526deeb6 100644 --- a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/ManifestService.java +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/ManifestService.java @@ -156,9 +156,8 @@ public class ManifestService extends AbstractResourceService { List bundles = utils.getIIIFBundles(item); // Set the default canvas dimensions. if (guessCanvasDimension) { - canvasService.guessCanvasDimensions(bundles); + canvasService.guessCanvasDimensions(context, bundles); } - // canvasService.setDefaultCanvasDimensions(); for (Bundle bnd : bundles) { String bundleToCPrefix = null; if (bundles.size() > 1) { diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java index 0e614fae2a..da50f33582 100644 --- a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java @@ -12,14 +12,11 @@ import java.net.URLEncoder; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; -import java.util.Map; import java.util.UUID; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import com.google.gson.JsonArray; -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; +import com.fasterxml.jackson.core.JsonProcessingException; +import 
com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.validator.routines.UrlValidator; import org.apache.logging.log4j.Logger; import org.apache.solr.client.solrj.SolrQuery; @@ -35,7 +32,6 @@ import org.dspace.app.iiif.model.generator.ContentAsTextGenerator; import org.dspace.app.iiif.model.generator.ManifestGenerator; import org.dspace.app.iiif.model.generator.SearchResultGenerator; import org.dspace.app.iiif.service.utils.IIIFUtils; -import org.dspace.discovery.SolrSearchCore; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -66,9 +62,6 @@ public class WordHighlightSolrSearch implements SearchAnnotationService { @Autowired SearchResultGenerator searchResult; - @Autowired - SolrSearchCore solrSearchCore; - @Autowired ManifestGenerator manifestGenerator; @@ -167,26 +160,49 @@ public class WordHighlightSolrSearch implements SearchAnnotationService { private String getAnnotationList(UUID uuid, String json, String query) { searchResult.setIdentifier(manifestId + "/search?q=" + URLEncoder.encode(query, StandardCharsets.UTF_8)); - GsonBuilder builder = new GsonBuilder(); - Gson gson = builder.create(); - JsonObject body = gson.fromJson(json, JsonObject.class); + + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + // If error occurred or no body, return immediately if (body == null) { - log.warn("Unable to process json response."); return utils.asJson(searchResult.generateResource()); } - // outer ocr highlight element - JsonObject highs = body.getAsJsonObject("ocrHighlighting"); - // highlight entries - for (Map.Entry ocrIds: highs.entrySet()) { - // ocr_text - JsonObject ocrObj = 
ocrIds.getValue().getAsJsonObject().getAsJsonObject("ocr_text"); - // snippets array - if (ocrObj != null) { - for (JsonElement snippetArray : ocrObj.getAsJsonObject().get("snippets").getAsJsonArray()) { - String pageId = getCanvasId(snippetArray.getAsJsonObject().get("pages")); - for (JsonElement highlights : snippetArray.getAsJsonObject().getAsJsonArray("highlights")) { - for (JsonElement highlight : highlights.getAsJsonArray()) { - searchResult.addResource(getAnnotation(highlight, pageId, uuid)); + + // Example structure of Solr response available at + // https://github.com/dbmdz/solr-ocrhighlighting/blob/main/docs/query.md + // Get the outer ocrHighlighting node + JsonNode highs = body.get("ocrHighlighting"); + if (highs != null) { + // Loop through each highlight entry under ocrHighlighting + for (final JsonNode highEntry : highs) { + // Get the ocr_text node under the entry + JsonNode ocrNode = highEntry.get("ocr_text"); + if (ocrNode != null) { + // Loop through the snippets array under that + for (final JsonNode snippet : ocrNode.get("snippets")) { + if (snippet != null) { + // Get a canvas ID based on snippet's pages + String pageId = getCanvasId(snippet.get("pages")); + if (pageId != null) { + // Loop through array of highlights for each snippet. + for (final JsonNode highlights : snippet.get("highlights")) { + if (highlights != null) { + // May be multiple word highlights on a page, so loop through them. + for (int i = 0; i < highlights.size(); i++) { + // Add annotation associated with each highlight + AnnotationGenerator anno = getAnnotation(highlights.get(i), pageId, uuid); + if (anno != null) { + searchResult.addResource(anno); + } + } + } + } + } } } } @@ -198,21 +214,24 @@ public class WordHighlightSolrSearch implements SearchAnnotationService { /** * Returns the annotation generator for the highlight. 
- * @param highlight highlight element from solor response + * @param highlight highlight node from Solr response * @param pageId page id from solr response * @return generator for a single annotation */ - private AnnotationGenerator getAnnotation(JsonElement highlight, String pageId, UUID uuid) { - JsonObject hcoords = highlight.getAsJsonObject(); - String text = (hcoords.get("text").getAsString()); - int ulx = hcoords.get("ulx").getAsInt(); - int uly = hcoords.get("uly").getAsInt(); - int lrx = hcoords.get("lrx").getAsInt(); - int lry = hcoords.get("lry").getAsInt(); - String w = Integer.toString(lrx - ulx); - String h = Integer.toString(lry - uly); - String params = ulx + "," + uly + "," + w + "," + h; - return createSearchResultAnnotation(params, text, pageId, uuid); + private AnnotationGenerator getAnnotation(JsonNode highlight, String pageId, UUID uuid) { + String text = highlight.get("text") != null ? highlight.get("text").asText() : null; + int ulx = highlight.get("ulx") != null ? highlight.get("ulx").asInt() : -1; + int uly = highlight.get("uly") != null ? highlight.get("uly").asInt() : -1; + int lrx = highlight.get("lrx") != null ? highlight.get("lrx").asInt() : -1; + int lry = highlight.get("lry") != null ? highlight.get("lry").asInt() : -1; + String w = (lrx >= 0 && ulx >= 0) ? Integer.toString(lrx - ulx) : null; + String h = (lry >= 0 && uly >= 0) ? Integer.toString(lry - uly) : null; + + if (text != null && w != null && h != null) { + String params = ulx + "," + uly + "," + w + "," + h; + return createSearchResultAnnotation(params, text, pageId, uuid); + } + return null; } /** @@ -221,15 +240,22 @@ public class WordHighlightSolrSearch implements SearchAnnotationService { * delimited with a "." and that the integer corresponds to the * canvas identifier in the manifest. For METS/ALTO documents, the page * order can be derived from the METS file when loading the solr index. 
- * @param element the pages element - * @return canvas id + * @param pagesNode the pages node + * @return canvas id or null if node was null */ - private String getCanvasId(JsonElement element) { - JsonArray pages = element.getAsJsonArray(); - JsonObject page = pages.get(0).getAsJsonObject(); - String[] identArr = page.get("id").getAsString().split("\\."); - // the canvas id. - return "c" + identArr[1]; + private String getCanvasId(JsonNode pagesNode) { + if (pagesNode != null) { + JsonNode page = pagesNode.get(0); + if (page != null) { + JsonNode pageId = page.get("id"); + if (pageId != null) { + String[] identArr = pageId.asText().split("\\."); + // the canvas id. + return "c" + identArr[1]; + } + } + } + return null; } /** diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/IIIFUtils.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/IIIFUtils.java index 4a4357b803..782a5a9852 100644 --- a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/IIIFUtils.java +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/IIIFUtils.java @@ -14,6 +14,7 @@ import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_WIDTH_QUALIFIER import java.sql.SQLException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; @@ -136,7 +137,7 @@ public class IIIFUtils { * @param b the DSpace bitstream to check * @return true if the bitstream can be used as IIIF resource */ - private boolean isIIIFBitstream(Context context, Bitstream b) { + public boolean isIIIFBitstream(Context context, Bitstream b) { return checkImageMimeType(getBitstreamMimeType(b, context)) && b.getMetadata().stream() .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_ENABLED)) .noneMatch(m -> m.getValue().equalsIgnoreCase("false") || m.getValue().equalsIgnoreCase("no")); @@ -227,7 +228,7 @@ public class IIIFUtils { * @param mimetype * @return true if an image */ - public 
boolean checkImageMimeType(String mimetype) { + private boolean checkImageMimeType(String mimetype) { if (mimetype != null && mimetype.contains("image/")) { return true; } @@ -335,12 +336,26 @@ public class IIIFUtils { public String getBundleIIIFToC(Bundle bundle) { String label = bundle.getMetadata().stream() .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_LABEL)) - .findFirst().map(m -> m.getValue()).orElse(bundle.getName()); + .findFirst().map(m -> m.getValue()).orElse(getToCBundleLabel(bundle)); return bundle.getMetadata().stream() .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_TOC)) .findFirst().map(m -> m.getValue() + TOC_SEPARATOR + label).orElse(label); } + /** + * Excludes bundles found in the iiif.exclude.toc.bundle list + * + * @param bundle the dspace bundle + * @return bundle name or null if bundle is excluded + */ + private String getToCBundleLabel(Bundle bundle) { + String[] iiifAlternate = configurationService.getArrayProperty("iiif.exclude.toc.bundle"); + if (Arrays.stream(iiifAlternate).anyMatch(x -> x.contentEquals(bundle.getName()))) { + return null; + } + return bundle.getName(); + } + /** * Return the iiif viewing hint for the item * diff --git a/dspace-oai/pom.xml b/dspace-oai/pom.xml index f4acb0d297..59bcdf39a2 100644 --- a/dspace-oai/pom.xml +++ b/dspace-oai/pom.xml @@ -8,7 +8,7 @@ dspace-parent org.dspace - 7.2 + 7.5-SNAPSHOT .. 
@@ -82,10 +82,6 @@ org.mockito mockito-all - - xml-apis - xml-apis - org.apache.commons commons-lang3 diff --git a/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java b/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java index 700105899a..e27a3ee947 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java @@ -8,6 +8,10 @@ package org.dspace.xoai.app; import static com.lyncode.xoai.dataprovider.core.Granularity.Second; +import static java.util.Objects.nonNull; +import static org.apache.commons.lang.StringUtils.EMPTY; +import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_PARAM; +import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START; import static org.dspace.xoai.util.ItemUtils.retrieveMetadata; import java.io.ByteArrayOutputStream; @@ -38,6 +42,8 @@ import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrQuery.ORDER; import org.apache.solr.client.solrj.SolrServerException; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrInputDocument; import org.dspace.authorize.ResourcePolicy; @@ -77,6 +83,7 @@ import org.springframework.context.annotation.AnnotationConfigApplicationContext public class XOAI { private static Logger log = LogManager.getLogger(XOAI.class); + // needed because the solr query only returns 10 rows by default private final Context context; private boolean optimize; private final boolean verbose; @@ -94,8 +101,8 @@ public class XOAI { private final AuthorizeService authorizeService; private final ItemService itemService; - private final static ConfigurationService configurationService = DSpaceServicesFactory - .getInstance().getConfigurationService(); + private final static ConfigurationService configurationService = 
DSpaceServicesFactory.getInstance() + .getConfigurationService(); private List extensionPlugins; @@ -152,9 +159,8 @@ public class XOAI { System.out.println("Using full import."); result = this.indexAll(); } else { - SolrQuery solrParams = new SolrQuery("*:*") - .addField("item.lastmodified") - .addSort("item.lastmodified", ORDER.desc).setRows(1); + SolrQuery solrParams = new SolrQuery("*:*").addField("item.lastmodified") + .addSort("item.lastmodified", ORDER.desc).setRows(1); SolrDocumentList results = DSpaceSolrSearch.query(solrServerResolver.getServer(), solrParams); if (results.getNumFound() == 0) { @@ -167,7 +173,6 @@ public class XOAI { } solrServerResolver.getServer().commit(); - if (optimize) { println("Optimizing Index"); solrServerResolver.getServer().optimize(); @@ -183,12 +188,10 @@ public class XOAI { } private int index(Date last) throws DSpaceSolrIndexerException, IOException { - System.out - .println("Incremental import. Searching for documents modified after: " - + last.toString()); + System.out.println("Incremental import. Searching for documents modified after: " + last.toString()); /* - * Index all changed or new items or items whose visibility is viable to - * change due to an embargo. + * Index all changed or new items or items whose visibility is viable to change + * due to an embargo. */ try { Iterator discoverableChangedItems = itemService @@ -204,31 +207,55 @@ public class XOAI { } /** - * Get all items already in the index which are viable to change visibility - * due to an embargo. Only consider those which haven't been modified - * anyways since the last update, so they aren't updated twice in one import - * run. + * Get all items already in the index which are viable to change visibility due + * to an embargo. Only consider those which haven't been modified anyways since + * the last update, so they aren't updated twice in one import run. 
* - * @param last - * maximum date for an item to be considered for an update - * @return Iterator over list of items which might have changed their - * visibility since the last update. + * @param last maximum date for an item to be considered for an update + * @return Iterator over list of items which might have changed their visibility + * since the last update. * @throws DSpaceSolrIndexerException */ private Iterator getItemsWithPossibleChangesBefore(Date last) throws DSpaceSolrIndexerException, IOException { try { - SolrQuery params = new SolrQuery("item.willChangeStatus:true").addField("item.id"); - SolrDocumentList documents = DSpaceSolrSearch.query(solrServerResolver.getServer(), params); + SolrQuery params = new SolrQuery("item.willChangeStatus:true").addField("item.id").setRows(100) + .addSort("item.handle", SolrQuery.ORDER.asc); + SolrClient solrClient = solrServerResolver.getServer(); + List items = new LinkedList<>(); - for (int i = 0; i < documents.getNumFound(); i++) { - Item item = itemService.find(context, - UUID.fromString((String) documents.get(i).getFieldValue("item.id"))); - if (item.getLastModified().before(last)) { - items.add(item); + boolean done = false; + /* + * Using solr cursors to paginate and prevent the query from returning 10 + * SolrDocument objects only. 
+ */ + String cursorMark = CURSOR_MARK_START; + String nextCursorMark = EMPTY; + + while (!done) { + params.set(CURSOR_MARK_PARAM, cursorMark); + QueryResponse response = solrClient.query(params); + nextCursorMark = response.getNextCursorMark(); + + for (SolrDocument document : response.getResults()) { + Item item = itemService.find(context, UUID.fromString((String) document.getFieldValue("item.id"))); + if (nonNull(item)) { + if (nonNull(item.getLastModified())) { + if (item.getLastModified().before(last)) { + items.add(item); + } + } else { + log.warn("Skipping item with id " + item.getID()); + } + } } + + if (cursorMark.equals(nextCursorMark)) { + done = true; + } + cursorMark = nextCursorMark; } return items.iterator(); - } catch (SolrServerException | SQLException | DSpaceSolrException ex) { + } catch (SolrServerException | SQLException ex) { throw new DSpaceSolrIndexerException(ex.getMessage(), ex); } } @@ -250,11 +277,10 @@ public class XOAI { } /** - * Check if an item is already indexed. Using this, it is possible to check - * if withdrawn or nondiscoverable items have to be indexed at all. + * Check if an item is already indexed. Using this, it is possible to check if + * withdrawn or nondiscoverable items have to be indexed at all. * - * @param item - * Item that should be checked for its presence in the index. + * @param item Item that should be checked for its presence in the index. * @return has it been indexed? */ private boolean checkIfIndexed(Item item) throws IOException { @@ -266,11 +292,11 @@ public class XOAI { return false; } } - /** + + /** * Check if an item is flagged visible in the index. * - * @param item - * Item that should be checked for its presence in the index. + * @param item Item that should be checked for its presence in the index. * @return has it been indexed? 
*/ private boolean checkIfVisibleInOAI(Item item) throws IOException { @@ -287,8 +313,7 @@ public class XOAI { } } - private int index(Iterator iterator) - throws DSpaceSolrIndexerException { + private int index(Iterator iterator) throws DSpaceSolrIndexerException { try { int i = 0; int batchSize = configurationService.getIntProperty("oai.import.batch.size", 1000); @@ -302,7 +327,7 @@ public class XOAI { } else { list.add(this.index(item)); } - //Uncache the item to keep memory consumption low + // Uncache the item to keep memory consumption low context.uncacheEntity(item); } catch (SQLException | IOException | XMLStreamException | WritingXmlException ex) { @@ -334,12 +359,11 @@ public class XOAI { } /** - * Method to get the most recent date on which the item changed concerning - * the OAI deleted status (policy start and end dates for all anonymous READ + * Method to get the most recent date on which the item changed concerning the + * OAI deleted status (policy start and end dates for all anonymous READ * policies and the standard last modification date) * - * @param item - * Item + * @param item Item * @return date * @throws SQLException */ @@ -382,17 +406,16 @@ public class XOAI { boolean isIndexed = this.checkIfIndexed(item); /* - * If the item is not under embargo, it should be visible. If it is, - * make it invisible if this is the first time it is indexed. For - * subsequent index runs, keep the current status, so that if the item - * is embargoed again, it is flagged as deleted instead and does not - * just disappear, or if it is still under embargo, it won't become - * visible and be known to harvesters as deleted before it gets - * disseminated for the first time. The item has to be indexed directly - * after publication even if it is still embargoed, because its - * lastModified date will not change when the embargo end date (or start - * date) is reached. 
To circumvent this, an item which will change its - * status in the future will be marked as such. + * If the item is not under embargo, it should be visible. If it is, make it + * invisible if this is the first time it is indexed. For subsequent index runs, + * keep the current status, so that if the item is embargoed again, it is + * flagged as deleted instead and does not just disappear, or if it is still + * under embargo, it won't become visible and be known to harvesters as deleted + * before it gets disseminated for the first time. The item has to be indexed + * directly after publication even if it is still embargoed, because its + * lastModified date will not change when the embargo end date (or start date) + * is reached. To circumvent this, an item which will change its status in the + * future will be marked as such. */ boolean isPublic = isEmbargoed ? (isIndexed ? isCurrentlyVisible : false) : true; @@ -404,33 +427,31 @@ public class XOAI { doc.addField("item.willChangeStatus", willChangeStatus(item)); /* - * Mark an item as deleted not only if it is withdrawn, but also if it - * is made private, because items should not simply disappear from OAI - * with a transient deletion policy. Do not set the flag for still - * invisible embargoed items, because this will override the item.public - * flag. + * Mark an item as deleted not only if it is withdrawn, but also if it is made + * private, because items should not simply disappear from OAI with a transient + * deletion policy. Do not set the flag for still invisible embargoed items, + * because this will override the item.public flag. */ doc.addField("item.deleted", (item.isWithdrawn() || !item.isDiscoverable() || (isEmbargoed ? isPublic : false))); /* - * An item that is embargoed will potentially not be harvested by - * incremental harvesters if the from and until params do not encompass - * both the standard lastModified date and the anonymous-READ resource - * policy start date. 
The same is true for the end date, where - * harvesters might not get a tombstone record. Therefore, consider all - * relevant policy dates and the standard lastModified date and take the - * most recent of those which have already passed. + * An item that is embargoed will potentially not be harvested by incremental + * harvesters if the from and until params do not encompass both the standard + * lastModified date and the anonymous-READ resource policy start date. The same + * is true for the end date, where harvesters might not get a tombstone record. + * Therefore, consider all relevant policy dates and the standard lastModified + * date and take the most recent of those which have already passed. */ - doc.addField("item.lastmodified", SolrUtils.getDateFormatter() - .format(this.getMostRecentModificationDate(item))); + doc.addField("item.lastmodified", + SolrUtils.getDateFormatter().format(this.getMostRecentModificationDate(item))); if (item.getSubmitter() != null) { doc.addField("item.submitter", item.getSubmitter().getEmail()); } - for (Collection col: item.getCollections()) { + for (Collection col : item.getCollections()) { doc.addField("item.collections", "col_" + col.getHandle().replace("/", "_")); } for (Community com : collectionsService.flatParentCommunities(context, item)) { @@ -457,8 +478,7 @@ public class XOAI { // Message output before processing - for debugging purposes if (verbose) { - println(String.format("Item %s with handle %s is about to be indexed", - item.getID().toString(), handle)); + println(String.format("Item %s with handle %s is about to be indexed", item.getID().toString(), handle)); } ByteArrayOutputStream out = new ByteArrayOutputStream(); @@ -476,8 +496,7 @@ public class XOAI { doc.addField("item.compile", out.toString()); if (verbose) { - println(String.format("Item %s with handle %s indexed", - item.getID().toString(), handle)); + println(String.format("Item %s with handle %s indexed", item.getID().toString(), handle)); } return 
doc; @@ -510,12 +529,10 @@ public class XOAI { return pub; } - private static boolean getKnownExplanation(Throwable t) { if (t instanceof ConnectException) { - System.err.println("Solr server (" - + configurationService.getProperty("oai.solr.url", "") - + ") is down, turn it on."); + System.err.println( + "Solr server (" + configurationService.getProperty("oai.solr.url", "") + ") is down, turn it on."); return true; } @@ -544,7 +561,7 @@ public class XOAI { } private static void cleanCache(XOAIItemCacheService xoaiItemCacheService, XOAICacheService xoaiCacheService) - throws IOException { + throws IOException { System.out.println("Purging cached OAI responses."); xoaiItemCacheService.deleteAll(); xoaiCacheService.deleteAll(); @@ -557,10 +574,8 @@ public class XOAI { public static void main(String[] argv) throws IOException, ConfigurationException { - - AnnotationConfigApplicationContext applicationContext = new AnnotationConfigApplicationContext(new Class[] { - BasicConfiguration.class - }); + AnnotationConfigApplicationContext applicationContext = new AnnotationConfigApplicationContext( + new Class[] { BasicConfiguration.class }); XOAICacheService cacheService = applicationContext.getBean(XOAICacheService.class); XOAIItemCacheService itemCacheService = applicationContext.getBean(XOAIItemCacheService.class); @@ -571,21 +586,19 @@ public class XOAI { CommandLineParser parser = new DefaultParser(); Options options = new Options(); options.addOption("c", "clear", false, "Clear index before indexing"); - options.addOption("o", "optimize", false, - "Optimize index at the end"); + options.addOption("o", "optimize", false, "Optimize index at the end"); options.addOption("v", "verbose", false, "Verbose output"); options.addOption("h", "help", false, "Shows some help"); options.addOption("n", "number", true, "FOR DEVELOPMENT MUST DELETE"); CommandLine line = parser.parse(options, argv); - String[] validSolrCommands = {COMMAND_IMPORT, COMMAND_CLEAN_CACHE}; - String[] 
validDatabaseCommands = {COMMAND_CLEAN_CACHE, COMMAND_COMPILE_ITEMS, COMMAND_ERASE_COMPILED_ITEMS}; - + String[] validSolrCommands = { COMMAND_IMPORT, COMMAND_CLEAN_CACHE }; + String[] validDatabaseCommands = { COMMAND_CLEAN_CACHE, COMMAND_COMPILE_ITEMS, + COMMAND_ERASE_COMPILED_ITEMS }; boolean solr = true; // Assuming solr by default solr = !("database").equals(configurationService.getProperty("oai.storage", "solr")); - boolean run = false; if (line.getArgs().length > 0) { if (solr) { @@ -607,10 +620,7 @@ public class XOAI { if (COMMAND_IMPORT.equals(command)) { ctx = new Context(Context.Mode.READ_ONLY); - XOAI indexer = new XOAI(ctx, - line.hasOption('o'), - line.hasOption('c'), - line.hasOption('v')); + XOAI indexer = new XOAI(ctx, line.hasOption('o'), line.hasOption('c'), line.hasOption('v')); applicationContext.getAutowireCapableBeanFactory().autowireBean(indexer); @@ -635,8 +645,7 @@ public class XOAI { } System.out.println("OAI 2.0 manager action ended. It took " - + ((System.currentTimeMillis() - start) / 1000) - + " seconds."); + + ((System.currentTimeMillis() - start) / 1000) + " seconds."); } else { usage(); } @@ -688,7 +697,7 @@ public class XOAI { private static void usage() { boolean solr = true; // Assuming solr by default - solr = !("database").equals(configurationService.getProperty("oai.storage","solr")); + solr = !("database").equals(configurationService.getProperty("oai.storage", "solr")); if (solr) { System.out.println("OAI Manager Script"); diff --git a/dspace-oai/src/main/java/org/dspace/xoai/controller/DSpaceOAIDataProvider.java b/dspace-oai/src/main/java/org/dspace/xoai/controller/DSpaceOAIDataProvider.java index 212f1e3406..379f2fa181 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/controller/DSpaceOAIDataProvider.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/controller/DSpaceOAIDataProvider.java @@ -72,7 +72,12 @@ public class DSpaceOAIDataProvider { private DSpaceResumptionTokenFormatter resumptionTokenFormat = new 
DSpaceResumptionTokenFormatter(); - @RequestMapping({"", "/"}) + @RequestMapping("") + public void index(HttpServletResponse response, HttpServletRequest request) throws IOException { + response.sendRedirect(request.getRequestURI() + "/"); + } + + @RequestMapping({"/"}) public String indexAction(HttpServletResponse response, Model model) throws ServletException { try { XOAIManager manager = xoaiManagerResolver.getManager(); diff --git a/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/DSpaceResourceResolver.java b/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/DSpaceResourceResolver.java index 26dd976495..e67e9c56bd 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/DSpaceResourceResolver.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/DSpaceResourceResolver.java @@ -22,6 +22,7 @@ import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; public class DSpaceResourceResolver implements ResourceResolver { + // Requires usage of Saxon as OAI-PMH uses some XSLT 2 functions private static final TransformerFactory transformerFactory = TransformerFactory .newInstance("net.sf.saxon.TransformerFactoryImpl", null); diff --git a/dspace-oai/src/test/java/org/dspace/xoai/tests/stylesheets/AbstractXSLTest.java b/dspace-oai/src/test/java/org/dspace/xoai/tests/stylesheets/AbstractXSLTest.java index 6fab56b526..42dbed04b6 100644 --- a/dspace-oai/src/test/java/org/dspace/xoai/tests/stylesheets/AbstractXSLTest.java +++ b/dspace-oai/src/test/java/org/dspace/xoai/tests/stylesheets/AbstractXSLTest.java @@ -19,6 +19,7 @@ import javax.xml.transform.stream.StreamSource; import org.apache.commons.io.IOUtils; public abstract class AbstractXSLTest { + // Requires usage of Saxon as OAI-PMH uses some XSLT 2 functions private static final TransformerFactory factory = TransformerFactory .newInstance("net.sf.saxon.TransformerFactoryImpl", null); diff --git 
a/dspace-rdf/pom.xml b/dspace-rdf/pom.xml index 8a25dece7e..6dbe3f3466 100644 --- a/dspace-rdf/pom.xml +++ b/dspace-rdf/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - 7.2 + 7.5-SNAPSHOT .. diff --git a/dspace-rest/pom.xml b/dspace-rest/pom.xml index c60383384f..f4df15ab1c 100644 --- a/dspace-rest/pom.xml +++ b/dspace-rest/pom.xml @@ -3,7 +3,7 @@ org.dspace dspace-rest war - 7.2 + 7.5-SNAPSHOT DSpace (Deprecated) REST Webapp DSpace RESTful Web Services API. NOTE: this REST API is DEPRECATED. Please consider using the REST API in the dspace-server-webapp instead! @@ -12,14 +12,13 @@ org.dspace dspace-parent - 7.2 + 7.5-SNAPSHOT .. ${basedir}/.. - 5.3.10.RELEASE @@ -63,45 +62,11 @@ org.glassfish.jersey.media jersey-media-json-jackson ${jersey.version} - - - com.fasterxml.jackson.core - jackson-module-jaxb-annotations - - - com.fasterxml.jackson.jaxrs - jackson-jaxrs-base - - - com.fasterxml.jackson.jaxrs - jackson-jaxrs-json-provider - - - com.fasterxml.jackson.module - jackson-module-jaxb-annotations - - - - - - com.fasterxml.jackson.core - jackson-annotations - ${jackson.version} - com.fasterxml.jackson.jaxrs - jackson-jaxrs-base - ${jackson.version} - - - com.fasterxml.jackson.jaxrs - jackson-jaxrs-json-provider - ${jackson.version} - - - com.fasterxml.jackson.module - jackson-module-jaxb-annotations - ${jackson.version} + org.glassfish.jersey.media + jersey-media-jaxb + ${jersey.version} @@ -156,11 +121,6 @@ jakarta.annotation jakarta.annotation-api - - - org.ow2.asm - asm-commons - @@ -193,30 +153,9 @@ ${spring-security.version} - - cglib - cglib - 2.2.2 - - - org.dspace dspace-api - - - com.fasterxml.jackson.core - jackson-core - - - com.fasterxml.jackson.core - jackson-databind - - - com.fasterxml.jackson.core - jackson-annotations - - diff --git a/dspace-rest/src/main/webapp/static/reports/restCollReport.js b/dspace-rest/src/main/webapp/static/reports/restCollReport.js index 1d1c04ae07..8d800a8edc 100644 --- 
a/dspace-rest/src/main/webapp/static/reports/restCollReport.js +++ b/dspace-rest/src/main/webapp/static/reports/restCollReport.js @@ -11,7 +11,7 @@ var CollReport = function() { //this.hasSorttable = function(){return true;} this.getLangSuffix = function(){ return "[en]"; - } + }; //Indicate if Password Authentication is supported //this.makeAuthLink = function(){return true;}; @@ -38,7 +38,7 @@ var CollReport = function() { icollection : "", ifilter : "", }; - } + }; this.getCurrentParameters = function(){ return { "show_fields[]" : this.myMetadataFields.getShowFields(), @@ -49,7 +49,7 @@ var CollReport = function() { icollection : $("#icollection").val(), ifilter : $("#ifilter").val(), }; - } + }; var self = this; this.init = function() { @@ -61,7 +61,7 @@ var CollReport = function() { collapsible: true, active: 2 }); - } + }; this.myAuth.callback = function(data) { self.createCollectionTable(); @@ -71,11 +71,11 @@ var CollReport = function() { $("#refresh-fields,#refresh-fields-bits").bind("click", function(){ self.drawItemTable($("#icollection").val(), $("#ifilter").val(), 0); }); - } + }; this.createCollectionTable = function() { var self = this; - var tbl = $(""); + var tbl = $("
    "); tbl.attr("id","table"); $("#report").replaceWith(tbl); @@ -93,7 +93,7 @@ var CollReport = function() { self.myHtmlUtil.makeTotalCol(thn); self.addCollections(); - } + }; this.addCollections = function() { var self = this; @@ -144,8 +144,6 @@ var CollReport = function() { self.myHtmlUtil.addTd(tr, parval).addClass("title comm"); self.myHtmlUtil.addTdAnchor(tr, coll.name, self.ROOTPATH + coll.handle).addClass("title"); - var td = self.myHtmlUtil.addTd(tr, "").addClass("num").addClass("link").addClass("numCount"); - td = self.myHtmlUtil.addTd(tr, "").addClass("num").addClass("numFiltered"); }; @@ -197,7 +195,7 @@ var CollReport = function() { $(".showCollections").attr("disabled", false); } }); - } + }; this.loadData = function() { self.spinner.spin($("h1")[0]); @@ -208,7 +206,7 @@ var CollReport = function() { $("#table tr.data").addClass("processing"); self.myFilters.filterString = self.myFilters.getFilterList(); self.doRow(0, self.THREADS, self.loadId); - } + }; this.doRow = function(row, threads, curLoadId) { if (self.loadId != curLoadId) return; @@ -285,14 +283,14 @@ var CollReport = function() { $("#table").addClass("sortable"); sorttable.makeSortable($("#table")[0]); } - } - + }; + this.totalFilters = function() { - var colcount = $("#table tr th").length; - for(var i=4; i= self.TOOBIG) { td.addClass("toobig"); - title+= "\nIt will take significant time to apply this filter to the entire collection." 
+ title+= "\nIt will take significant time to apply this filter to the entire collection."; } td.attr("title", title); return false; @@ -359,7 +357,7 @@ var CollReport = function() { self.totalFilters(); } return true; - } + }; this.setCellCount = function(tr, cid, offset, isPartial, itemFilter) { var filterName = itemFilter["filter-name"]; @@ -391,7 +389,7 @@ var CollReport = function() { $("#ifilter").val(filterName); }); } - } + }; this.drawItemTable = function(cid, filter, offset) { @@ -433,7 +431,7 @@ var CollReport = function() { offset: offset, "show_fields[]" : fields, "show_fields_bits[]" : bitfields, - } + }; $.ajax({ url: "/rest/filtered-collections/"+cid, @@ -452,7 +450,6 @@ var CollReport = function() { self.myHtmlUtil.addTd(tr, item.name).addClass("ititle"); if (fields != null) { $.each(fields, function(index, field){ - var text = ""; var td = self.myHtmlUtil.addTd(tr, ""); $.each(item.metadata, function(mindex,mv){ if (mv.key == field) { @@ -493,7 +490,7 @@ var CollReport = function() { $("#itemResults").accordion("option", "active", self.IACCIDX_ITEM); } }); - } + }; //Ignore the first column containing a row number and the item handle this.exportCol = function(colnum, col) { @@ -503,8 +500,8 @@ var CollReport = function() { data += (colnum == 1) ? 
"" : ","; data += self.exportCell(col); return data; - } -} + }; +}; CollReport.prototype = Object.create(Report.prototype); $(document).ready(function(){ diff --git a/dspace-rest/src/main/webapp/static/reports/restQueryReport.js b/dspace-rest/src/main/webapp/static/reports/restQueryReport.js index 9a8297fb69..18e9a61d08 100644 --- a/dspace-rest/src/main/webapp/static/reports/restQueryReport.js +++ b/dspace-rest/src/main/webapp/static/reports/restQueryReport.js @@ -12,7 +12,7 @@ var QueryReport = function() { //this.hasSorttable = function(){return true;} this.getLangSuffix = function(){ return "[en]"; - } + }; //Indicate if Password Authentication is supported //this.makeAuthLink = function(){return true;}; @@ -31,7 +31,7 @@ var QueryReport = function() { "limit" : this.ITEM_LIMIT, "offset" : 0, }; - } + }; this.getCurrentParameters = function(){ var expand = "parentCollection,metadata"; if (this.myBitstreamFields.hasBitstreamFields()) { @@ -54,21 +54,20 @@ var QueryReport = function() { paramArr[paramArr.length] = $(this).val(); }); return params; - } + }; var self = this; this.init = function() { this.baseInit(); - var communitySelector = new CommunitySelector(this, $("#collSelector"), this.myReportParameters.params["collSel[]"]); - } + }; this.initMetadataFields = function() { this.myMetadataFields = new QueryableMetadataFields(self); this.myMetadataFields.load(); - } + }; this.myAuth.callback = function(data) { - $(".query-button").click(function(){self.runQuery();}) - } + $(".query-button").click(function(){self.runQuery();}); + }; this.runQuery = function() { this.spinner.spin($("body")[0]); @@ -93,7 +92,7 @@ var QueryReport = function() { $("button").not("#next,#prev").attr("disabled", false); } }); - } + }; this.drawItemFilterTable = function(data) { $("#itemtable").replaceWith($('
    ')); @@ -150,7 +149,7 @@ var QueryReport = function() { }); var fieldtext = self.myBitstreamFields.getKeyText(key, item, data.bitfields); for(var j=0; j"+fieldtext[j]+"")) + td.append($("
    "+fieldtext[j]+"
    ")); } } }); @@ -173,7 +172,7 @@ var QueryReport = function() { sorttable.makeSortable(itbl[0]); } $("#metadatadiv").accordion("option", "active", $("#metadatadiv > h3").length - 1); - } + }; //Ignore the first column containing a row number and the item handle, get handle for the collection this.exportCol = function(colnum, col) { @@ -189,8 +188,8 @@ var QueryReport = function() { } else { data += self.exportCell(col); } return data; - } -} + }; +}; QueryReport.prototype = Object.create(Report.prototype); $(document).ready(function(){ @@ -223,7 +222,7 @@ var QueryableMetadataFields = function(report) { self.initQueries(); report.spinner.stop(); $(".query-button").attr("disabled", false); - } + }; this.initQueries = function() { $("#predefselect") @@ -271,7 +270,7 @@ var QueryableMetadataFields = function(report) { self.drawFilterQuery("*","matches","^.*[^[:ascii:]].*$"); } }); - } + }; this.drawFilterQuery = function(pField, pOp, pVal) { var div = $("