diff --git a/.codecov.yml b/.codecov.yml new file mode 100644 index 0000000000..3dba42ef37 --- /dev/null +++ b/.codecov.yml @@ -0,0 +1,29 @@ +# DSpace configuration for Codecov.io coverage reports +# These override the default YAML settings at +# https://docs.codecov.io/docs/codecov-yaml#section-default-yaml +# Can be validated via instructions at: +# https://docs.codecov.io/docs/codecov-yaml#validate-your-repository-yaml + +# Settings related to code coverage analysis +coverage: + status: + # Configuration for project-level checks. This checks how the PR changes overall coverage. + project: + default: + # For each PR, auto compare coverage to previous commit. + # Require that overall (project) coverage does NOT drop more than 0.5% + target: auto + threshold: 0.5% + # Configuration for patch-level checks. This checks the relative coverage of the new PR code ONLY. + patch: + default: + # For each PR, make sure the coverage of the new code is within 1% of current overall coverage. + # We let 'patch' be more lenient as we only require *project* coverage to not drop significantly. + target: auto + threshold: 1% + +# Turn PR comments "off". This feature adds the code coverage summary as a +# comment on each PR. See https://docs.codecov.io/docs/pull-request-comments +# However, this same info is available from the Codecov checks in the PR's +# "Checks" tab in GitHub. So, the comment is unnecessary. +comment: false diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000000..9893d233e1 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,22 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: bug, needs triage +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. Include the version(s) of DSpace where you've seen this problem. Link to examples if they are public. + +**To Reproduce** +Steps to reproduce the behavior: +1. Do this +2. Then this... + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Related work** +Link to any related tickets or PRs here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000000..34cc2c9e4f --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest a new feature for this project +title: '' +labels: new feature, needs triage +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives or workarounds you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. diff --git a/.github/disabled-workflows/pull_request_opened.yml b/.github/disabled-workflows/pull_request_opened.yml new file mode 100644 index 0000000000..0dc718c0b9 --- /dev/null +++ b/.github/disabled-workflows/pull_request_opened.yml @@ -0,0 +1,26 @@ +# This workflow runs whenever a new pull request is created +# TEMPORARILY DISABLED. Unfortunately this doesn't work for PRs created from forked repositories (which is how we tend to create PRs). 
+# There is no known workaround yet. See https://github.community/t/how-to-use-github-token-for-prs-from-forks/16818 +name: Pull Request opened + +# Only run for newly opened PRs against the "main" branch +on: + pull_request: + types: [opened] + branches: + - main + +jobs: + automation: + runs-on: ubuntu-latest + steps: + # Assign the PR to whoever created it. This is useful for visualizing assignments on project boards + # See https://github.com/marketplace/actions/pull-request-assigner + - name: Assign PR to creator + uses: thomaseizinger/assign-pr-creator-action@v1.0.0 + # Note, this authentication token is created automatically + # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + # Ignore errors. It is possible the PR was created by someone who cannot be assigned + continue-on-error: true diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 849fbf93be..b11e3cd531 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,8 +1,7 @@ ## References -_Add references/links to any related tickets or PRs. These may include:_ -* Link to [JIRA](https://jira.lyrasis.org/projects/DS/summary) ticket(s), if any -* Link to [REST Contract](https://github.com/DSpace/Rest7Contract) or an open REST Contract PR, if any -* Link to [Angular issue or PR](https://github.com/DSpace/dspace-angular/issues) related to this PR, if any +_Add references/links to any related issues or PRs. These may include:_ +* Fixes #[issue-number] +* Related to [REST Contract](https://github.com/DSpace/Rest7Contract) ## Description Short summary of changes (1-2 sentences). @@ -20,11 +19,8 @@ List of changes in this PR: _This checklist provides a reminder of what we are going to look for when reviewing your PR. You need not complete this checklist prior to creating your PR (draft PRs are always welcome). If you are unsure about an item in the checklist, don't hesitate to ask. We're here to help!_ - [ ] My PR is small in size (e.g. less than 1,000 lines of code, not including comments & integration tests). Exceptions may be made if previously agreed upon. -- [ ] My PR passes Checkstyle validation based on the [Code Style Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Style+Guide) +- [ ] My PR passes Checkstyle validation based on the [Code Style Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Style+Guide). - [ ] My PR includes Javadoc for _all new (or modified) public methods and classes_. It also includes Javadoc for large or complex private methods. -- [ ] My PR passes all tests and includes new/updated Unit or Integration Tests for any bug fixes, improvements or new features. A few reminders about what constitutes good tests: - * Include tests for different user types, including: (1) Anonymous user, (2) Logged in user (non-admin), and (3) Administrator. - * Include tests for known error scenarios and error codes (e.g. `400 Bad Request`, `401 Unauthorized`, `403 Forbidden`, `404 Not Found`, etc) - * For bug fixes, include a test that reproduces the bug and proves it is fixed. For clarity, it may be useful to provide the test in a separate commit from the bug fix. 
-- [ ] If my PR includes new, third-party dependencies (in any `pom.xml`), I've made sure their licenses align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/master/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation. +- [ ] My PR passes all tests and includes new/updated Unit or Integration Tests based on the [Code Testing Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Testing+Guide). +- [ ] If my PR includes new, third-party dependencies (in any `pom.xml`), I've made sure their licenses align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation. - [ ] If my PR modifies the REST API, I've linked to the REST Contract page (or open PR) related to this change. diff --git a/.github/workflows/issue_opened.yml b/.github/workflows/issue_opened.yml new file mode 100644 index 0000000000..3ccdd22a0d --- /dev/null +++ b/.github/workflows/issue_opened.yml @@ -0,0 +1,29 @@ +# This workflow runs whenever a new issue is created +name: Issue opened + +on: + issues: + types: [opened] + +jobs: + automation: + runs-on: ubuntu-latest + steps: + # Add the new issue to a project board, if it needs triage + # See https://github.com/marketplace/actions/create-project-card-action + - name: Add issue to project board + # Only add to project board if issue is flagged as "needs triage" or has no labels + # NOTE: By default we flag new issues as "needs triage" in our issue template + if: (contains(github.event.issue.labels.*.name, 'needs triage') || join(github.event.issue.labels.*.name) == '') + uses: technote-space/create-project-card-action@v1 + # Note, the authentication token below is an ORG level Secret. + # It must be created/recreated manually via a personal access token with "public_repo" and "admin:org" permissions + # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token#permissions-for-the-github_token + # This is necessary because the "DSpace Backlog" project is an org level project (i.e. not repo specific) + with: + GITHUB_TOKEN: ${{ secrets.ORG_PROJECT_TOKEN }} + PROJECT: DSpace Backlog + COLUMN: Triage + CHECK_ORG_PROJECT: true + # Ignore errors. + continue-on-error: true diff --git a/.github/workflows/label_merge_conflicts.yml b/.github/workflows/label_merge_conflicts.yml new file mode 100644 index 0000000000..dcbab18f1b --- /dev/null +++ b/.github/workflows/label_merge_conflicts.yml @@ -0,0 +1,25 @@ +# This workflow checks open PRs for merge conflicts and labels them when conflicts are found +name: Check for merge conflicts + +# Run whenever the "main" branch is updated +# NOTE: This means merge conflicts are only checked for when a PR is merged to main. +on: + push: + branches: + - main + +jobs: + triage: + runs-on: ubuntu-latest + steps: + # See: https://github.com/mschilde/auto-label-merge-conflicts/ + - name: Auto-label PRs with merge conflicts + uses: mschilde/auto-label-merge-conflicts@v2.0 + # Add "merge conflict" label if a merge conflict is detected. Remove it when resolved. 
+ # Note, the authentication token is created automatically + # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token + with: + CONFLICT_LABEL_NAME: 'merge conflict' + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # Ignore errors + continue-on-error: true diff --git a/.lgtm.yml b/.lgtm.yml new file mode 100644 index 0000000000..132de8a6de --- /dev/null +++ b/.lgtm.yml @@ -0,0 +1,9 @@ +# LGTM Settings (https://lgtm.com/) +# For reference, see https://lgtm.com/help/lgtm/lgtm.yml-configuration-file +# or template at https://lgtm.com/static/downloads/lgtm.template.yml + +extraction: + java: + index: + # Specify the Java version required to build the project + java_version: 11 diff --git a/.travis.yml b/.travis.yml index dfc4c31799..89cb443597 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,46 +1,55 @@ +# DSpace's Travis CI Configuration +# Builds: https://travis-ci.com/github/DSpace/DSpace +# Travis configuration guide/validation: https://config.travis-ci.com/explore language: java -sudo: false +# TODO: Upgrade to Bionic dist: trusty - -env: - # Give Maven 1GB of memory to work with - - MAVEN_OPTS=-Xmx1024M +os: linux jdk: # DS-3384 Oracle JDK has DocLint enabled by default. # Let's use this to catch any newly introduced DocLint issues. - oraclejdk11 -## Should we run into any problems with oraclejdk8 on Travis, we may try the following workaround. -## https://docs.travis-ci.com/user/languages/java#Testing-Against-Multiple-JDKs -## https://github.com/travis-ci/travis-ci/issues/3259#issuecomment-130860338 -#addons: -# apt: -# packages: -# - oracle-java8-installer +# Define global environment variables (shared across all jobs) +env: + global: + # Suppress all Maven "downloading" messages in Travis logs (see https://stackoverflow.com/a/35653426) + # This also slightly speeds builds in Travis, as there is less logging + - HIDE_MAVEN_DOWNLOADS="-Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn" + # Give Maven 1GB of memory to work with + - MAVEN_OPTS="-Xmx1024M $HIDE_MAVEN_DOWNLOADS" + # Maven options which will skip ALL code validation checks. Includes skipping: + # - enforcer.skip => Skip maven-enforcer-plugin rules + # - checkstyle.skip => Skip all checkstyle checks by maven-checkstyle-plugin + # - license.skip => Skip all license header checks by license-maven-plugin + # - xml.skip => Skip all XML/XSLT validation by xml-maven-plugin + # (Useful for builds which don't need to repeat code checks) + - SKIP_CODE_CHECKS="-Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true" -before_install: - # Remove outdated settings.xml from Travis builds. Workaround for https://github.com/travis-ci/travis-ci/issues/4629 - - rm ~/.m2/settings.xml +# Create two jobs to run Unit & Integration tests in parallel. +# These jobs only differ in the TEST_FLAGS defined below, +# and otherwise share all the other configs in this file +jobs: + include: + - name: "Run Unit Tests & Check Code" + # NOTE: unit tests include deprecated REST API v6 (as it has unit tests) + env: TEST_FLAGS="-DskipUnitTests=false -Pdspace-rest" + - name: "Run Integration Tests" + # NOTE: skips code checks, as they are already done by Unit Test job + env: TEST_FLAGS="-DskipIntegrationTests=false $SKIP_CODE_CHECKS" -# Skip install stage, as we'll do it below -install: "echo 'Skipping install stage, dependencies will be downloaded during build and test stages.'" +# Skip 'install' process to save time. 
We build/install/test all at once in "script" below. +install: skip -# Build DSpace and run both Unit and Integration Tests -script: - # Summary of flags used (below): - # license:check => Validate all source code license headers - # -Dmaven.test.skip=false => Enable DSpace Unit Tests - # -DskipITs=false => Enable DSpace Integration Tests - # -Pdspace-rest => Enable optional dspace-rest module as part of build - # -P !assembly => Skip assembly of "dspace-installer" directory (as it can be memory intensive) - # -B => Maven batch/non-interactive mode (recommended for CI) - # -V => Display Maven version info before build - # -Dsurefire.rerunFailingTestsCount=2 => try again for flakey tests, and keep track of/report on number of retries - - "mvn clean install license:check -Dmaven.test.skip=false -DskipITs=false -Pdspace-rest -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2" +# Build DSpace and run configured tests (see 'jobs' above) +# Notes on flags used: +# -B => Maven batch/non-interactive mode (recommended for CI) +# -V => Display Maven version info before build +# -P-assembly => Disable build of dspace-installer in [src]/dspace/, as it can be memory intensive +# -Pcoverage-report => Enable aggregate code coverage report (across all modules) via JaCoCo +script: mvn install -B -V -P-assembly -Pcoverage-report $TEST_FLAGS -# After a successful build and test (see 'script'), send code coverage reports to coveralls.io -# These code coverage reports are generated by jacoco-maven-plugin (during test process above). -after_success: - # Run "verify", enabling the "coveralls" profile. This sends our reports to coveralls.io (see coveralls-maven-plugin) - - "cd dspace && mvn verify -P coveralls" +# After a successful build and test (see 'script'), send aggregate code coverage reports +# (generated by -Pcoverage-report above) to CodeCov.io +after_success: bash <(curl -s https://codecov.io/bash) diff --git a/Dockerfile b/Dockerfile index 006f32f28e..2dc3ee9bda 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ # This image will be published as dspace/dspace -# See https://github.com/DSpace/DSpace/tree/master/dspace/src/main/docker for usage details +# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details # # This version is JDK11 compatible # - tomcat:8-jdk11 diff --git a/Dockerfile.cli b/Dockerfile.cli index 116b251f2d..d4204ebdd0 100644 --- a/Dockerfile.cli +++ b/Dockerfile.cli @@ -1,5 +1,5 @@ # This image will be published as dspace/dspace-cli -# See https://github.com/DSpace/DSpace/tree/master/dspace/src/main/docker for usage details +# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details # # This version is JDK11 compatible # - openjdk:11 diff --git a/Dockerfile.test b/Dockerfile.test index 090f714e28..82ffdef177 100644 --- a/Dockerfile.test +++ b/Dockerfile.test @@ -1,5 +1,5 @@ # This image will be published as dspace/dspace -# See https://github.com/DSpace/DSpace/tree/master/dspace/src/main/docker for usage details +# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details # # This version is JDK11 compatible # - tomcat:8-jdk11 diff --git a/README.md b/README.md index 0e701bf140..2e6c0ad54e 100644 --- a/README.md +++ b/README.md @@ -1,24 +1,24 @@ # DSpace -[![Build Status](https://travis-ci.org/DSpace/DSpace.png?branch=master)](https://travis-ci.org/DSpace/DSpace) +[![Build Status](https://travis-ci.com/DSpace/DSpace.png?branch=main)](https://travis-ci.com/DSpace/DSpace) -[DSpace 
Documentation](https://wiki.duraspace.org/display/DSDOC/) | +[DSpace Documentation](https://wiki.lyrasis.org/display/DSDOC/) | [DSpace Releases](https://github.com/DSpace/DSpace/releases) | -[DSpace Wiki](https://wiki.duraspace.org/display/DSPACE/Home) | -[Support](https://wiki.duraspace.org/display/DSPACE/Support) +[DSpace Wiki](https://wiki.lyrasis.org/display/DSPACE/Home) | +[Support](https://wiki.lyrasis.org/display/DSPACE/Support) DSpace open source software is a turnkey repository application used by more than 2,000 organizations and institutions worldwide to provide durable access to digital resources. For more information, visit http://www.dspace.org/ *** -:warning: **Work on DSpace 7 has begun on our `master` branch.** This means that there is temporarily NO user interface on this `master` branch. DSpace 7 will feature a new, unified [Angular](https://angular.io/) user interface, along with an enhanced, rebuilt REST API. The latest status of this work can be found on the [DSpace 7 UI Working Group](https://wiki.duraspace.org/display/DSPACE/DSpace+7+UI+Working+Group) page. Additionally, the codebases can be found in the following places: - * DSpace 7 REST API work is occurring on the [`master` branch](https://github.com/DSpace/DSpace/tree/master/dspace-server-webapp) of this repository. - * The REST Contract is being documented at https://github.com/DSpace/Rest7Contract +:warning: **Work on DSpace 7 has begun on our `main` branch.** This means that there is NO user interface on this `main` branch. DSpace 7 will feature a new, unified [Angular](https://angular.io/) user interface, along with an enhanced, rebuilt REST API. The latest status of this work can be found on the [DSpace 7 Working Group](https://wiki.lyrasis.org/display/DSPACE/DSpace+7+Working+Group) page. Additionally, the codebases can be found in the following places: + * DSpace 7 REST API work is occurring on the [`main` branch](https://github.com/DSpace/DSpace/tree/main/dspace-server-webapp) of this repository. + * The REST Contract is at https://github.com/DSpace/Rest7Contract * DSpace 7 Angular UI work is occurring at https://github.com/DSpace/dspace-angular -**If you would like to get involved in our DSpace 7 development effort, we welcome new contributors.** Just join one of our meetings or get in touch via Slack. See the [DSpace 7 UI Working Group](https://wiki.duraspace.org/display/DSPACE/DSpace+7+UI+Working+Group) wiki page for more info. +**If you would like to get involved in our DSpace 7 development effort, we welcome new contributors.** Just join one of our meetings or get in touch via Slack. See the [DSpace 7 Working Group](https://wiki.lyrasis.org/display/DSPACE/DSpace+7+Working+Group) wiki page for more info. **If you are looking for the ongoing maintenance work for DSpace 6 (or prior releases)**, you can find that work on the corresponding maintenance branch (e.g. [`dspace-6_x`](https://github.com/DSpace/DSpace/tree/dspace-6_x)) in this repository. *** @@ -31,10 +31,10 @@ Past releases are all available via GitHub at https://github.com/DSpace/DSpace/r ## Documentation / Installation -Documentation for each release may be viewed online or downloaded via our [Documentation Wiki](https://wiki.duraspace.org/display/DSDOC/). +Documentation for each release may be viewed online or downloaded via our [Documentation Wiki](https://wiki.lyrasis.org/display/DSDOC/). 
The latest DSpace Installation instructions are available at: -https://wiki.duraspace.org/display/DSDOC6x/Installing+DSpace +https://wiki.lyrasis.org/display/DSDOC6x/Installing+DSpace Please be aware that, as a Java web application, DSpace requires a database (PostgreSQL or Oracle) and a servlet container (usually Tomcat) in order to function. @@ -49,14 +49,14 @@ DSpace is a community built and supported project. We do not have a centralized but have a dedicated group of volunteers who help us improve the software, documentation, resources, etc. We welcome contributions of any type. Here's a few basic guides that provide suggestions for contributing to DSpace: -* [How to Contribute to DSpace](https://wiki.duraspace.org/display/DSPACE/How+to+Contribute+to+DSpace): How to contribute in general (via code, documentation, bug reports, expertise, etc) -* [Code Contribution Guidelines](https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines): How to give back code or contribute features, bug fixes, etc. -* [DSpace Community Advisory Team (DCAT)](https://wiki.duraspace.org/display/cmtygp/DSpace+Community+Advisory+Team): If you are not a developer, we also have an interest group specifically for repository managers. The DCAT group meets virtually, once a month, and sends open invitations to join their meetings via the [DCAT mailing list](https://groups.google.com/d/forum/DSpaceCommunityAdvisoryTeam). +* [How to Contribute to DSpace](https://wiki.lyrasis.org/display/DSPACE/How+to+Contribute+to+DSpace): How to contribute in general (via code, documentation, bug reports, expertise, etc) +* [Code Contribution Guidelines](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines): How to give back code or contribute features, bug fixes, etc. +* [DSpace Community Advisory Team (DCAT)](https://wiki.lyrasis.org/display/cmtygp/DSpace+Community+Advisory+Team): If you are not a developer, we also have an interest group specifically for repository managers. The DCAT group meets virtually, once a month, and sends open invitations to join their meetings via the [DCAT mailing list](https://groups.google.com/d/forum/DSpaceCommunityAdvisoryTeam). -We also encourage GitHub Pull Requests (PRs) at any time. Please see our [Development with Git](https://wiki.duraspace.org/display/DSPACE/Development+with+Git) guide for more info. +We also encourage GitHub Pull Requests (PRs) at any time. Please see our [Development with Git](https://wiki.lyrasis.org/display/DSPACE/Development+with+Git) guide for more info. In addition, a listing of all known contributors to DSpace software can be -found online at: https://wiki.duraspace.org/display/DSPACE/DSpaceContributors +found online at: https://wiki.lyrasis.org/display/DSPACE/DSpaceContributors ## Getting Help @@ -64,12 +64,12 @@ DSpace provides public mailing lists where you can post questions or raise topic We welcome everyone to participate in these lists: * [dspace-community@googlegroups.com](https://groups.google.com/d/forum/dspace-community) : General discussion about DSpace platform, announcements, sharing of best practices -* [dspace-tech@googlegroups.com](https://groups.google.com/d/forum/dspace-tech) : Technical support mailing list. See also our guide for [How to troubleshoot an error](https://wiki.duraspace.org/display/DSPACE/Troubleshoot+an+error). +* [dspace-tech@googlegroups.com](https://groups.google.com/d/forum/dspace-tech) : Technical support mailing list. 
See also our guide for [How to troubleshoot an error](https://wiki.lyrasis.org/display/DSPACE/Troubleshoot+an+error). * [dspace-devel@googlegroups.com](https://groups.google.com/d/forum/dspace-devel) : Developers / Development mailing list Great Q&A is also available under the [DSpace tag on Stackoverflow](http://stackoverflow.com/questions/tagged/dspace) -Additional support options are listed at https://wiki.duraspace.org/display/DSPACE/Support +Additional support options are at https://wiki.lyrasis.org/display/DSPACE/Support DSpace also has an active service provider network. If you'd rather hire a service provider to install, upgrade, customize or host DSpace, then we recommend getting in touch with one of our @@ -77,47 +77,46 @@ install, upgrade, customize or host DSpace, then we recommend getting in touch w ## Issue Tracker -The DSpace Issue Tracker can be found at: https://jira.duraspace.org/projects/DS/summary +DSpace uses GitHub to track issues: +* Backend (REST API) issues: https://github.com/DSpace/DSpace/issues +* Frontend (User Interface) issues: https://github.com/DSpace/dspace-angular/issues ## Testing ### Running Tests By default, in DSpace, Unit Tests and Integration Tests are disabled. However, they are -run automatically by [Travis CI](https://travis-ci.org/DSpace/DSpace/) for all Pull Requests and code commits. +run automatically by [Travis CI](https://travis-ci.com/DSpace/DSpace/) for all Pull Requests and code commits. * How to run both Unit Tests (via `maven-surefire-plugin`) and Integration Tests (via `maven-failsafe-plugin`): ``` - # NOTE: while "mvn test" runs Unit Tests, - # Integration Tests only run for "verify" or "install" phases - mvn clean install -Dmaven.test.skip=false -DskipITs=false + mvn install -DskipUnitTests=false -DskipIntegrationTests=false ``` -* How to run just Unit Tests: +* How to run _only_ Unit Tests: ``` - mvn clean test -Dmaven.test.skip=false + mvn test -DskipUnitTests=false ``` * How to run a *single* Unit Test ``` # Run all tests in a specific test class - # NOTE: testClassName is just the class name, do not include package - mvn clean test -Dmaven.test.skip=false -Dtest=[testClassName] + # NOTE: failIfNoTests=false is required to skip tests in other modules + mvn test -DskipUnitTests=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false # Run one test method in a specific test class - mvn clean test -Dmaven.test.skip=false -Dtest=[testClassName]#[testMethodName] + mvn test -DskipUnitTests=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false ``` -* How to run Integration Tests (requires running Unit tests too) +* How to run _only_ Integration Tests ``` - mvn clean verify -Dmaven.test.skip=false -DskipITs=false + mvn install -DskipIntegrationTests=false ``` -* How to run a *single* Integration Test (requires running Unit tests too) +* How to run a *single* Integration Test ``` # Run all integration tests in a specific test class - # NOTE: Integration Tests only run for "verify" or "install" phases - # NOTE: testClassName is just the class name, do not include package - mvn clean verify -Dmaven.test.skip=false -DskipITs=false -Dit.test=[testClassName] + # NOTE: failIfNoTests=false is required to skip tests in other modules + mvn install -DskipIntegrationTests=false -Dit.test=[full.package.testClassName] -DfailIfNoTests=false # Run one test method in a specific test class - mvn clean verify -Dmaven.test.skip=false -DskipITs=false -Dit.test=[testClassName]#[testMethodName] + mvn install 
-DskipIntegrationTests=false -Dit.test=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false ``` * How to run only tests of a specific DSpace module ``` @@ -133,4 +132,4 @@ run automatically by [Travis CI](https://travis-ci.org/DSpace/DSpace/) for all P ## License DSpace source code is freely available under a standard [BSD 3-Clause license](https://opensource.org/licenses/BSD-3-Clause). -The full license is available at http://www.dspace.org/license/ +The full license is available in the [LICENSE](LICENSE) file or online at http://www.dspace.org/license/ diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index b016f1bff6..ced0f562bf 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -12,7 +12,7 @@ org.dspace dspace-parent - 7.0-SNAPSHOT + 7.0-beta5-SNAPSHOT .. @@ -98,20 +98,6 @@ - - - com.mycila - license-maven-plugin - - - **/src/test/resources/** - **/src/test/data/** - **/.gitignore - **/src/main/resources/rebel.xml - src/test/data/dspaceFolder/config/spiders/** - - - org.codehaus.mojo @@ -141,44 +127,82 @@ + + + org.codehaus.gmaven + groovy-maven-plugin + + + setproperty + initialize + + execute + + + + project.properties['agnostic.build.dir'] = project.build.directory.replace(File.separator, '/'); + log.info("Initializing Maven property 'agnostic.build.dir' to: {}", project.properties['agnostic.build.dir']); + + + + + + + + com.mycila + license-maven-plugin + + + src/test/resources/** + src/test/data/** + + src/main/resources/org/dspace/storage/rdbms/flywayupgrade/** + + + + - findbugs + spotbugs false - - org.codehaus.mojo - findbugs-maven-plugin + com.github.spotbugs + spotbugs-maven-plugin - + - test-environment + unit-test-environment false - maven.test.skip + skipUnitTests false - @@ -198,53 +222,16 @@ - setupTestEnvironment + setupUnitTestEnvironment generate-test-resources unpack - - setupIntegrationTestEnvironment - pre-integration-test - - unpack - - - - - org.codehaus.gmaven - groovy-maven-plugin - - - setproperty - initialize - - execute - - - - project.properties['agnostic.build.dir'] = project.build.directory.replace(File.separator, '/'); - log.info("Initializing Maven property 'agnostic.build.dir' to: {}", project.properties['agnostic.build.dir']); - - - - - - - + maven-surefire-plugin @@ -255,11 +242,56 @@ ${agnostic.build.dir}/testing/dspace/ true + ${agnostic.build.dir}/testing/dspace/solr/ + + + - + + + integration-test-environment + + false + + skipIntegrationTests + false + + + + + + + maven-dependency-plugin + + ${project.build.directory}/testing + + + org.dspace + dspace-parent + ${project.version} + zip + testEnvironment + + + + + + setupIntegrationTestEnvironment + pre-integration-test + + unpack + + + + + + maven-failsafe-plugin @@ -269,12 +301,12 @@ ${agnostic.build.dir}/testing/dspace/ true + ${agnostic.build.dir}/testing/dspace/solr/ - @@ -305,19 +337,25 @@ - org.dspace + net.handle handle - org.eclipse.jetty.aggregate - jetty-all + net.cnri + cnri-servlet-container + - javax.servlet - org.eclipse.jetty.orbit + org.ow2.asm + asm-commons + + + org.eclipse.jetty + jetty-server + org.dspace jargon @@ -331,6 +369,18 @@ apache-jena-libs pom + + + + org.glassfish.jersey.inject + jersey-hk2 + ${jersey.version} + + + + commons-cli + commons-cli + commons-codec commons-codec @@ -487,9 +537,164 @@ org.apache.solr - solr-cell + solr-solrj ${solr.client.version} + + + + org.apache.solr + solr-core + test + ${solr.client.version} + + + commons-cli + commons-cli + + + org.eclipse.jetty + jetty-continuation + + + org.eclipse.jetty + jetty-deploy 
+ + + org.eclipse.jetty + jetty-http + + + org.eclipse.jetty + jetty-io + + + org.eclipse.jetty + jetty-jmx + + + org.eclipse.jetty + jetty-rewrite + + + org.eclipse.jetty + jetty-security + + + org.eclipse.jetty + jetty-server + + + org.eclipse.jetty + jetty-servlet + + + org.eclipse.jetty + jetty-servlets + + + org.eclipse.jetty + jetty-util + + + org.eclipse.jetty + jetty-webapp + + + org.eclipse.jetty + jetty-xml + + + + + org.apache.solr + solr-cell + + + + commons-cli + commons-cli + + + org.ow2.asm + asm-commons + + + org.bouncycastle + bcpkix-jdk15on + + + org.bouncycastle + bcprov-jdk15on + + + org.eclipse.jetty + jetty-xml + + + org.eclipse.jetty + jetty-http + + + org.eclipse.jetty + jetty-servlet + + + org.eclipse.jetty + jetty-webapp + + + org.eclipse.jetty + jetty-util + + + org.eclipse.jetty + jetty-deploy + + + org.eclipse.jetty + jetty-continuation + + + org.eclipse.jetty + jetty-servlets + + + org.eclipse.jetty + jetty-io + + + org.eclipse.jetty + jetty-security + + + + + org.apache.lucene + lucene-core + + + + org.apache.lucene + lucene-analyzers-icu + test + + + org.apache.lucene + lucene-analyzers-smartcn + test + + + org.apache.lucene + lucene-analyzers-stempel + test + + + org.apache.xmlbeans + xmlbeans + 2.6.0 + com.maxmind.geoip2 @@ -547,7 +752,7 @@ org.flywaydb flyway-core - 4.0.3 + 6.5.5 @@ -571,6 +776,7 @@ com.google.oauth-client google-oauth-client + com.google.code.findbugs @@ -580,6 +786,7 @@ com.google.code.findbugs annotations + joda-time joda-time @@ -670,7 +877,7 @@ org.xmlunit - xmlunit-matchers + xmlunit-core 2.6.3 test diff --git a/dspace-api/src/main/java/org/dspace/administer/CreateAdministrator.java b/dspace-api/src/main/java/org/dspace/administer/CreateAdministrator.java index a58691e251..983038c812 100644 --- a/dspace-api/src/main/java/org/dspace/administer/CreateAdministrator.java +++ b/dspace-api/src/main/java/org/dspace/administer/CreateAdministrator.java @@ -115,7 +115,7 @@ public final class CreateAdministrator { String lastName = null; char[] password1 = null; char[] password2 = null; - String language = I18nUtil.DEFAULTLOCALE.getLanguage(); + String language = I18nUtil.getDefaultLocale().getLanguage(); while (!dataOK) { System.out.print("E-mail address: "); diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/DSpaceCSV.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/DSpaceCSV.java index 55bb3fed4b..ad7824bebf 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/DSpaceCSV.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/DSpaceCSV.java @@ -8,14 +8,10 @@ package org.dspace.app.bulkedit; import java.io.BufferedReader; -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; +import java.io.InputStream; import java.io.InputStreamReader; -import java.io.OutputStreamWriter; import java.io.Serializable; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -27,6 +23,7 @@ import java.util.UUID; import java.util.regex.Matcher; import java.util.regex.Pattern; +import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.dspace.authority.AuthorityValue; import org.dspace.authority.factory.AuthorityServiceFactory; @@ -141,18 +138,18 @@ public class DSpaceCSV implements Serializable { /** * Create a new instance, reading the lines in from file * - * @param f The file to read from + * @param inputStream the inputstream to 
read from * @param c The DSpace Context * @throws Exception thrown if there is an error reading or processing the file */ - public DSpaceCSV(File f, Context c) throws Exception { + public DSpaceCSV(InputStream inputStream, Context c) throws Exception { // Initialise the class init(); // Open the CSV file BufferedReader input = null; try { - input = new BufferedReader(new InputStreamReader(new FileInputStream(f), "UTF-8")); + input = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8)); // Read the heading line String head = input.readLine(); @@ -623,21 +620,15 @@ public class DSpaceCSV implements Serializable { } /** - * Save the CSV file to the given filename - * - * @param filename The filename to save the CSV file to - * @throws IOException Thrown if an error occurs when writing the file + * Creates and returns an InputStream from the CSV Lines in this DSpaceCSV + * @return The InputStream created from the CSVLines in this DSpaceCSV */ - public final void save(String filename) throws IOException { - // Save the file - BufferedWriter out = new BufferedWriter( - new OutputStreamWriter( - new FileOutputStream(filename), "UTF-8")); + public InputStream getInputStream() { + StringBuilder stringBuilder = new StringBuilder(); for (String csvLine : getCSVLinesAsStringArray()) { - out.write(csvLine + "\n"); + stringBuilder.append(csvLine + "\n"); } - out.flush(); - out.close(); + return IOUtils.toInputStream(stringBuilder.toString(), StandardCharsets.UTF_8); } /** diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExport.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExport.java index bc015ef5e0..3332440f06 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExport.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExport.java @@ -8,271 +8,107 @@ package org.dspace.app.bulkedit; import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import com.google.common.collect.Iterators; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.cli.PosixParser; -import org.dspace.content.Collection; -import org.dspace.content.Community; +import org.apache.commons.lang3.StringUtils; import org.dspace.content.DSpaceObject; -import org.dspace.content.Item; import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.ItemService; -import org.dspace.core.Constants; +import org.dspace.content.service.MetadataDSpaceCsvExportService; import org.dspace.core.Context; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.utils.DSpace; /** * Metadata exporter to allow the batch export of metadata into a file * * @author Stuart Lewis */ -public class MetadataExport { - /** - * The items to export - */ - protected Iterator toExport; +public class MetadataExport extends DSpaceRunnable { - protected ItemService itemService; + private boolean help = false; + private String filename = null; + private String handle = null; + private boolean exportAllMetadata = false; + private boolean exportAllItems = false; - protected Context context; + private static final String EXPORT_CSV = 
"exportCSV"; - /** - * Whether to export all metadata, or just normally edited metadata - */ - protected boolean exportAll; + private MetadataDSpaceCsvExportService metadataDSpaceCsvExportService = new DSpace().getServiceManager() + .getServicesByType(MetadataDSpaceCsvExportService.class).get(0); - protected MetadataExport() { - itemService = ContentServiceFactory.getInstance().getItemService(); - } + private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); - /** - * Set up a new metadata export - * - * @param c The Context - * @param toExport The ItemIterator of items to export - * @param exportAll whether to export all metadata or not (include handle, provenance etc) - */ - public MetadataExport(Context c, Iterator toExport, boolean exportAll) { - itemService = ContentServiceFactory.getInstance().getItemService(); - - // Store the export settings - this.toExport = toExport; - this.exportAll = exportAll; - this.context = c; - } - - /** - * Method to export a community (and sub-communities and collections) - * - * @param c The Context - * @param toExport The Community to export - * @param exportAll whether to export all metadata or not (include handle, provenance etc) - */ - public MetadataExport(Context c, Community toExport, boolean exportAll) { - itemService = ContentServiceFactory.getInstance().getItemService(); + @Override + public void internalRun() throws Exception { + if (help) { + logHelpInfo(); + printHelp(); + return; + } + Context context = new Context(); + context.turnOffAuthorisationSystem(); try { - // Try to export the community - this.toExport = buildFromCommunity(c, toExport, 0); - this.exportAll = exportAll; - this.context = c; - } catch (SQLException sqle) { - // Something went wrong... - System.err.println("Error running exporter:"); - sqle.printStackTrace(System.err); - System.exit(1); + context.setCurrentUser(ePersonService.find(context, this.getEpersonIdentifier())); + } catch (SQLException e) { + handler.handleException(e); } + DSpaceCSV dSpaceCSV = metadataDSpaceCsvExportService + .handleExport(context, exportAllItems, exportAllMetadata, handle, + handler); + handler.writeFilestream(context, filename, dSpaceCSV.getInputStream(), EXPORT_CSV); + context.restoreAuthSystemState(); + context.complete(); } - /** - * Build an array list of item ids that are in a community (include sub-communities and collections) - * - * @param context DSpace context - * @param community The community to build from - * @param indent How many spaces to use when writing out the names of items added - * @return The list of item ids - * @throws SQLException if database error - */ - protected Iterator buildFromCommunity(Context context, Community community, int indent) - throws SQLException { - // Add all the collections - List collections = community.getCollections(); - Iterator result = null; - for (Collection collection : collections) { - for (int i = 0; i < indent; i++) { - System.out.print(" "); - } - - Iterator items = itemService.findByCollection(context, collection); - result = addItemsToResult(result, items); - - } - // Add all the sub-communities - List communities = community.getSubcommunities(); - for (Community subCommunity : communities) { - for (int i = 0; i < indent; i++) { - System.out.print(" "); - } - Iterator items = buildFromCommunity(context, subCommunity, indent + 1); - result = addItemsToResult(result, items); - } - - return result; + protected void logHelpInfo() { + handler.logInfo("\nfull export: metadata-export"); + 
handler.logInfo("partial export: metadata-export -i handle"); } - private Iterator addItemsToResult(Iterator result, Iterator items) { - if (result == null) { - result = items; - } else { - result = Iterators.concat(result, items); - } - - return result; + @Override + public MetadataExportScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager().getServiceByName("metadata-export", + MetadataExportScriptConfiguration.class); } - /** - * Run the export - * - * @return the exported CSV lines - */ - public DSpaceCSV export() { + @Override + public void setup() throws ParseException { + + if (commandLine.hasOption('h')) { + help = true; + return; + } + + if (!commandLine.hasOption('i')) { + exportAllItems = true; + } + handle = commandLine.getOptionValue('i'); + filename = getFileNameForExportFile(); + + exportAllMetadata = commandLine.hasOption('a'); + + } + + protected String getFileNameForExportFile() throws ParseException { + Context context = new Context(); try { - Context.Mode originalMode = context.getCurrentMode(); - context.setMode(Context.Mode.READ_ONLY); - - // Process each item - DSpaceCSV csv = new DSpaceCSV(exportAll); - while (toExport.hasNext()) { - Item item = toExport.next(); - csv.addItem(item); - context.uncacheEntity(item); - } - - context.setMode(originalMode); - // Return the results - return csv; - } catch (Exception e) { - // Something went wrong... - System.err.println("Error exporting to CSV:"); - e.printStackTrace(); - return null; - } - } - - /** - * Print the help message - * - * @param options The command line options the user gave - * @param exitCode the system exit code to use - */ - private static void printHelp(Options options, int exitCode) { - // print the help message - HelpFormatter myhelp = new HelpFormatter(); - myhelp.printHelp("MetadataExport\n", options); - System.out.println("\nfull export: metadataexport -f filename"); - System.out.println("partial export: metadataexport -i handle -f filename"); - System.exit(exitCode); - } - - /** - * main method to run the metadata exporter - * - * @param argv the command line arguments given - * @throws Exception if error occurs - */ - public static void main(String[] argv) throws Exception { - // Create an options object and populate it - CommandLineParser parser = new PosixParser(); - - Options options = new Options(); - - options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)"); - options.addOption("f", "file", true, "destination where you want file written"); - options.addOption("a", "all", false, - "include all metadata fields that are not normally changed (e.g. provenance)"); - options.addOption("h", "help", false, "help"); - - CommandLine line = null; - - try { - line = parser.parse(options, argv); - } catch (ParseException pe) { - System.err.println("Error with commands."); - printHelp(options, 1); - System.exit(0); - } - - if (line.hasOption('h')) { - printHelp(options, 0); - } - - // Check a filename is given - if (!line.hasOption('f')) { - System.err.println("Required parameter -f missing!"); - printHelp(options, 1); - } - String filename = line.getOptionValue('f'); - - // Create a context - Context c = new Context(Context.Mode.READ_ONLY); - c.turnOffAuthorisationSystem(); - - // The things we'll export - Iterator toExport = null; - MetadataExport exporter = null; - - // Export everything? 
- boolean exportAll = line.hasOption('a'); - - ContentServiceFactory contentServiceFactory = ContentServiceFactory.getInstance(); - // Check we have an item OK - ItemService itemService = contentServiceFactory.getItemService(); - if (!line.hasOption('i')) { - System.out.println("Exporting whole repository WARNING: May take some time!"); - exporter = new MetadataExport(c, itemService.findAll(c), exportAll); - } else { - String handle = line.getOptionValue('i'); - DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(c, handle); - if (dso == null) { - System.err.println("Item '" + handle + "' does not resolve to an item in your repository!"); - printHelp(options, 1); - } - - if (dso.getType() == Constants.ITEM) { - System.out.println("Exporting item '" + dso.getName() + "' (" + handle + ")"); - List item = new ArrayList<>(); - item.add((Item) dso); - exporter = new MetadataExport(c, item.iterator(), exportAll); - } else if (dso.getType() == Constants.COLLECTION) { - System.out.println("Exporting collection '" + dso.getName() + "' (" + handle + ")"); - Collection collection = (Collection) dso; - toExport = itemService.findByCollection(c, collection); - exporter = new MetadataExport(c, toExport, exportAll); - } else if (dso.getType() == Constants.COMMUNITY) { - System.out.println("Exporting community '" + dso.getName() + "' (" + handle + ")"); - exporter = new MetadataExport(c, (Community) dso, exportAll); + DSpaceObject dso = null; + if (StringUtils.isNotBlank(handle)) { + dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, handle); } else { - System.err.println("Error identifying '" + handle + "'"); - System.exit(1); + dso = ContentServiceFactory.getInstance().getSiteService().findSite(context); } + if (dso == null) { + throw new ParseException("The given handle could not be resolved to a DSpaceObject"); + } + return dso.getID().toString() + ".csv"; + } catch (SQLException e) { + handler.handleException("Something went wrong trying to retrieve DSO for handle: " + handle, e); } - - // Perform the export - DSpaceCSV csv = exporter.export(); - - // Save the files to the file - csv.save(filename); - - // Finish off and tidy up - c.restoreAuthSystemState(); - c.complete(); + return null; } } diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportCli.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportCli.java new file mode 100644 index 0000000000..88ef66cbf6 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportCli.java @@ -0,0 +1,33 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkedit; + +import org.apache.commons.cli.ParseException; + +public class MetadataExportCli extends MetadataExport { + + @Override + protected String getFileNameForExportFile() { + return commandLine.getOptionValue('f'); + } + + @Override + public void setup() throws ParseException { + super.setup(); + // Check a filename is given + if (!commandLine.hasOption('f')) { + throw new ParseException("Required parameter -f missing!"); + } + } + + @Override + protected void logHelpInfo() { + handler.logInfo("\nfull export: metadata-export -f filename"); + handler.logInfo("partial export: metadata-export -i handle -f filename"); + } +} diff --git 
a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportCliScriptConfiguration.java new file mode 100644 index 0000000000..a7699fe9a4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportCliScriptConfiguration.java @@ -0,0 +1,26 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkedit; + +import java.io.OutputStream; + +import org.apache.commons.cli.Options; + +public class MetadataExportCliScriptConfiguration extends MetadataExportScriptConfiguration { + + + @Override + public Options getOptions() { + Options options = super.getOptions(); + options.addOption("f", "file", true, "destination where you want file written"); + options.getOption("f").setType(OutputStream .class); + options.getOption("f").setRequired(true); + super.options = options; + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java new file mode 100644 index 0000000000..0c513c4667 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java @@ -0,0 +1,70 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkedit; + +import java.sql.SQLException; + +import org.apache.commons.cli.Options; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.core.Context; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * The {@link ScriptConfiguration} for the {@link MetadataExport} script + */ +public class MetadataExportScriptConfiguration extends ScriptConfiguration { + + @Autowired + private AuthorizeService authorizeService; + + private Class dspaceRunnableClass; + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + /** + * Generic setter for the dspaceRunnableClass + * @param dspaceRunnableClass The dspaceRunnableClass to be set on this MetadataExportScriptConfiguration + */ + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public boolean isAllowedToExecute(Context context) { + try { + return authorizeService.isAdmin(context); + } catch (SQLException e) { + throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + } + } + + @Override + public Options getOptions() { + if (options == null) { + Options options = new Options(); + + options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)"); + options.getOption("i").setType(String.class); + options.addOption("a", "all", false, + "include all metadata fields that are not normally changed (e.g. 
provenance)"); + options.getOption("a").setType(boolean.class); + options.addOption("h", "help", false, "help"); + options.getOption("h").setType(boolean.class); + + + super.options = options; + } + return options; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java index e8fff71cf4..67086c1536 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java @@ -7,10 +7,8 @@ */ package org.dspace.app.bulkedit; -import java.io.BufferedReader; -import java.io.File; import java.io.IOException; -import java.io.InputStreamReader; +import java.io.InputStream; import java.sql.SQLException; import java.util.ArrayList; import java.util.Enumeration; @@ -19,16 +17,12 @@ import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.UUID; import javax.annotation.Nullable; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.cli.PosixParser; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.authority.AuthorityValue; @@ -65,6 +59,10 @@ import org.dspace.eperson.EPerson; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.handle.service.HandleService; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.scripts.handler.DSpaceRunnableHandler; +import org.dspace.utils.DSpace; +import org.dspace.workflow.WorkflowException; import org.dspace.workflow.WorkflowItem; import org.dspace.workflow.WorkflowService; import org.dspace.workflow.factory.WorkflowServiceFactory; @@ -74,11 +72,7 @@ import org.dspace.workflow.factory.WorkflowServiceFactory; * * @author Stuart Lewis */ -public class MetadataImport { - /** - * The Context - */ - Context c; +public class MetadataImport extends DSpaceRunnable { /** * The DSpaceCSV object we're processing @@ -95,10 +89,6 @@ public class MetadataImport { */ protected static Set authorityControlled; - static { - setAuthorizedMetadataFields(); - } - /** * The prefix of the authority controlled field */ @@ -143,45 +133,200 @@ public class MetadataImport { */ protected Integer rowCount = 1; + private boolean useTemplate = false; + private String filename = null; + private boolean useWorkflow = false; + private boolean workflowNotify = false; + private boolean change = false; + private boolean help = false; + protected boolean validateOnly; + /** * Logger */ protected static final Logger log = org.apache.logging.log4j.LogManager.getLogger(MetadataImport.class); - protected final AuthorityValueService authorityValueService; - - protected final ItemService itemService; - protected final InstallItemService installItemService; - protected final CollectionService collectionService; - protected final HandleService handleService; - protected final WorkspaceItemService workspaceItemService; - protected final RelationshipTypeService relationshipTypeService; - protected final RelationshipService relationshipService; - protected final EntityTypeService entityTypeService; - protected final EntityService entityService; + protected ItemService itemService = 
ContentServiceFactory.getInstance().getItemService();
+    protected InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService();
+    protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
+    protected HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
+    protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService();
+    protected RelationshipTypeService relationshipTypeService = ContentServiceFactory.getInstance()
+                                                                                      .getRelationshipTypeService();
+    protected RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService();
+    protected EntityTypeService entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService();
+    protected EntityService entityService = ContentServiceFactory.getInstance().getEntityService();
+    protected AuthorityValueService authorityValueService = AuthorityServiceFactory.getInstance()
+                                                                                    .getAuthorityValueService();
 
     /**
      * Create an instance of the metadata importer. Requires a context and an array of CSV lines
      * to examine.
      *
-     * @param c The context
      * @param toImport An array of CSV lines to examine
      */
-    public MetadataImport(Context c, DSpaceCSV toImport) {
+    public void initMetadataImport(DSpaceCSV toImport) {
         // Store the import settings
-        this.c = c;
-        csv = toImport;
         this.toImport = toImport.getCSVLines();
-        installItemService = ContentServiceFactory.getInstance().getInstallItemService();
-        itemService = ContentServiceFactory.getInstance().getItemService();
-        collectionService = ContentServiceFactory.getInstance().getCollectionService();
-        handleService = HandleServiceFactory.getInstance().getHandleService();
-        authorityValueService = AuthorityServiceFactory.getInstance().getAuthorityValueService();
-        workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService();
-        relationshipService = ContentServiceFactory.getInstance().getRelationshipService();
-        relationshipTypeService = ContentServiceFactory.getInstance().getRelationshipTypeService();
-        entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService();
-        entityService = ContentServiceFactory.getInstance().getEntityService();
+    }
+
+    @Override
+    public void internalRun() throws Exception {
+        if (help) {
+            printHelp();
+            return;
+        }
+        // Create a context
+        Context c = null;
+        c = new Context();
+        c.turnOffAuthorisationSystem();
+
+        // Find the EPerson, assign to context
+        assignCurrentUserInContext(c);
+
+        if (authorityControlled == null) {
+            setAuthorizedMetadataFields();
+        }
+        // Read lines from the CSV file
+        try {
+
+            Optional optionalFileStream = handler.getFileStream(c, filename);
+            if (optionalFileStream.isPresent()) {
+                csv = new DSpaceCSV(optionalFileStream.get(), c);
+            } else {
+                throw new IllegalArgumentException("Error reading file, the file couldn't be found for filename: "
+                                                       + filename);
+            }
+        } catch (MetadataImportInvalidHeadingException miihe) {
+            throw miihe;
+        } catch (Exception e) {
+            throw new Exception("Error reading file: " + e.getMessage(), e);
+        }
+
+        // Perform the first import - just highlight differences
+        initMetadataImport(csv);
+        List changes;
+
+        if (!commandLine.hasOption('s') || validateOnly) {
+            // See what has changed
+            try {
+                changes = runImport(c, false, useWorkflow, workflowNotify, useTemplate);
+            } catch (MetadataImportException mie) {
+                throw mie;
+            }
+
+            // Display the changes
+            int changeCounter = displayChanges(changes, false);
+
+            // If there were changes, ask if we should execute them
+            if (!validateOnly && changeCounter > 0) {
+                try {
+                    // Ask the user if they want to make the changes
+                    handler.logInfo("\n" + changeCounter + " item(s) will be changed\n");
+                    change = determineChange(handler);
+
+                } catch (IOException ioe) {
+                    throw new IOException("Error: " + ioe.getMessage() + ", No changes have been made", ioe);
+                }
+            } else {
+                handler.logInfo("There were no changes detected");
+            }
+        } else {
+            change = true;
+        }
+
+        try {
+            // If required, make the change
+            if (change && !validateOnly) {
+                try {
+                    // Make the changes
+                    changes = runImport(c, true, useWorkflow, workflowNotify, useTemplate);
+                } catch (MetadataImportException mie) {
+                    throw mie;
+                }
+
+                // Display the changes
+                displayChanges(changes, true);
+            }
+
+            // Finish off and tidy up
+            c.restoreAuthSystemState();
+            c.complete();
+        } catch (Exception e) {
+            c.abort();
+            throw new Exception(
+                "Error committing changes to database: " + e.getMessage() + ", aborting most recent changes", e);
+        }
+
+    }
+
+    protected void assignCurrentUserInContext(Context context) throws ParseException {
+        UUID uuid = getEpersonIdentifier();
+        if (uuid != null) {
+            try {
+                EPerson ePerson = EPersonServiceFactory.getInstance().getEPersonService().find(context, uuid);
+                context.setCurrentUser(ePerson);
+            } catch (SQLException e) {
+                log.error("Something went wrong trying to fetch the eperson for uuid: " + uuid, e);
+            }
+        }
+    }
+
+    /**
+     * This method determines whether the changes should be applied or not. This defaults to true for the REST
+     * script, as we don't want to interact with the caller. It is overridden in the CLI script to ask for
+     * confirmation.
+     * @param handler Applicable DSpaceRunnableHandler
+     * @return boolean indicating the value
+     * @throws IOException If something goes wrong
+     */
+    protected boolean determineChange(DSpaceRunnableHandler handler) throws IOException {
+        return true;
+    }
+
+    @Override
+    public MetadataImportScriptConfiguration getScriptConfiguration() {
+        return new DSpace().getServiceManager().getServiceByName("metadata-import",
+                                                                 MetadataImportScriptConfiguration.class);
+    }
+
+
+    public void setup() throws ParseException {
+        useTemplate = false;
+        filename = null;
+        useWorkflow = false;
+        workflowNotify = false;
+
+        if (commandLine.hasOption('h')) {
+            help = true;
+            return;
+        }
+
+        // Check a filename is given
+        if (!commandLine.hasOption('f')) {
+            throw new ParseException("Required parameter -f missing!");
+        }
+        filename = commandLine.getOptionValue('f');
+
+        // Option to apply template to new items
+        if (commandLine.hasOption('t')) {
+            useTemplate = true;
+        }
+
+        // Options for workflows, and workflow notifications for new items
+        if (commandLine.hasOption('w')) {
+            useWorkflow = true;
+            if (commandLine.hasOption('n')) {
+                workflowNotify = true;
+            }
+        } else if (commandLine.hasOption('n')) {
+            throw new ParseException(
+                "Invalid option 'n': (notify) can only be specified with the 'w' (workflow) option.");
+        }
+        validateOnly = commandLine.hasOption('v');
+
+        // Is this a silent run?
+ change = false; } /** @@ -195,281 +340,277 @@ public class MetadataImport { * @return An array of BulkEditChange elements representing the items that have changed * @throws MetadataImportException if something goes wrong */ - public List runImport(boolean change, + public List runImport(Context c, boolean change, boolean useWorkflow, boolean workflowNotify, - boolean useTemplate) throws MetadataImportException { + boolean useTemplate) + throws MetadataImportException, SQLException, AuthorizeException, WorkflowException, IOException { // Store the changes ArrayList changes = new ArrayList(); // Make the changes - try { - Context.Mode originalMode = c.getCurrentMode(); - c.setMode(Context.Mode.BATCH_EDIT); + Context.Mode originalMode = c.getCurrentMode(); + c.setMode(Context.Mode.BATCH_EDIT); - // Process each change - rowCount = 1; - for (DSpaceCSVLine line : toImport) { - // Resolve target references to other items - populateRefAndRowMap(line, line.getID()); - line = resolveEntityRefs(line); - // Get the DSpace item to compare with - UUID id = line.getID(); + // Process each change + rowCount = 1; + for (DSpaceCSVLine line : toImport) { + // Resolve target references to other items + populateRefAndRowMap(line, line.getID()); + line = resolveEntityRefs(c, line); + // Get the DSpace item to compare with + UUID id = line.getID(); - // Is there an action column? - if (csv.hasActions() && (!"".equals(line.getAction())) && (id == null)) { - throw new MetadataImportException("'action' not allowed for new items!"); - } - - WorkspaceItem wsItem = null; - WorkflowItem wfItem = null; - Item item = null; - - // Is this an existing item? - if (id != null) { - // Get the item - item = itemService.find(c, id); - if (item == null) { - throw new MetadataImportException("Unknown item ID " + id); - } - - // Record changes - BulkEditChange whatHasChanged = new BulkEditChange(item); - - // Has it moved collection? - List collections = line.get("collection"); - if (collections != null) { - // Sanity check we're not orphaning it - if (collections.size() == 0) { - throw new MetadataImportException("Missing collection from item " + item.getHandle()); - } - List actualCollections = item.getCollections(); - compare(item, collections, actualCollections, whatHasChanged, change); - } - - // Iterate through each metadata element in the csv line - for (String md : line.keys()) { - // Get the values we already have - if (!"id".equals(md)) { - // Get the values from the CSV - String[] fromCSV = line.get(md).toArray(new String[line.get(md).size()]); - // Remove authority unless the md is not authority controlled - if (!isAuthorityControlledField(md)) { - for (int i = 0; i < fromCSV.length; i++) { - int pos = fromCSV[i].indexOf(csv.getAuthoritySeparator()); - if (pos > -1) { - fromCSV[i] = fromCSV[i].substring(0, pos); - } - } - } - // Compare - compareAndUpdate(item, fromCSV, change, md, whatHasChanged, line); - } - } - - if (csv.hasActions()) { - // Perform the action - String action = line.getAction(); - if ("".equals(action)) { - // Do nothing - } else if ("expunge".equals(action)) { - // Does the configuration allow deletes? 
- if (!ConfigurationManager.getBooleanProperty("bulkedit", "allowexpunge", false)) { - throw new MetadataImportException("'expunge' action denied by configuration"); - } - - // Remove the item - - if (change) { - itemService.delete(c, item); - } - - whatHasChanged.setDeleted(); - } else if ("withdraw".equals(action)) { - // Withdraw the item - if (!item.isWithdrawn()) { - if (change) { - itemService.withdraw(c, item); - } - whatHasChanged.setWithdrawn(); - } - } else if ("reinstate".equals(action)) { - // Reinstate the item - if (item.isWithdrawn()) { - if (change) { - itemService.reinstate(c, item); - } - whatHasChanged.setReinstated(); - } - } else { - // Unknown action! - throw new MetadataImportException("Unknown action: " + action); - } - } - - // Only record if changes have been made - if (whatHasChanged.hasChanges()) { - changes.add(whatHasChanged); - } - } else { - // This is marked as a new item, so no need to compare - - // First check a user is set, otherwise this can't happen - if (c.getCurrentUser() == null) { - throw new MetadataImportException( - "When adding new items, a user must be specified with the -e option"); - } - - // Iterate through each metadata element in the csv line - BulkEditChange whatHasChanged = new BulkEditChange(); - for (String md : line.keys()) { - // Get the values we already have - if (!"id".equals(md) && !"rowName".equals(md)) { - // Get the values from the CSV - String[] fromCSV = line.get(md).toArray(new String[line.get(md).size()]); - - // Remove authority unless the md is not authority controlled - if (!isAuthorityControlledField(md)) { - for (int i = 0; i < fromCSV.length; i++) { - int pos = fromCSV[i].indexOf(csv.getAuthoritySeparator()); - if (pos > -1) { - fromCSV[i] = fromCSV[i].substring(0, pos); - } - } - } - - // Add all the values from the CSV line - add(fromCSV, md, whatHasChanged); - } - } - - // Check it has an owning collection - List collections = line.get("collection"); - if (collections == null) { - throw new MetadataImportException( - "New items must have a 'collection' assigned in the form of a handle"); - } - - // Check collections are really collections - ArrayList check = new ArrayList(); - Collection collection; - for (String handle : collections) { - try { - // Resolve the handle to the collection - collection = (Collection) handleService.resolveToObject(c, handle); - - // Check it resolved OK - if (collection == null) { - throw new MetadataImportException( - "'" + handle + "' is not a Collection! You must specify a valid collection for " + - "new items"); - } - - // Check for duplicate - if (check.contains(collection)) { - throw new MetadataImportException( - "Duplicate collection assignment detected in new item! " + handle); - } else { - check.add(collection); - } - } catch (Exception ex) { - throw new MetadataImportException( - "'" + handle + "' is not a Collection! You must specify a valid collection for new " + - "items", - ex); - } - } - - // Record the addition to collections - boolean first = true; - for (String handle : collections) { - Collection extra = (Collection) handleService.resolveToObject(c, handle); - if (first) { - whatHasChanged.setOwningCollection(extra); - } else { - whatHasChanged.registerNewMappedCollection(extra); - } - first = false; - } - - // Create the new item? 
- if (change) { - // Create the item - String collectionHandle = line.get("collection").get(0); - collection = (Collection) handleService.resolveToObject(c, collectionHandle); - wsItem = workspaceItemService.create(c, collection, useTemplate); - item = wsItem.getItem(); - - // Add the metadata to the item - for (BulkEditMetadataValue dcv : whatHasChanged.getAdds()) { - if (!StringUtils.equals(dcv.getSchema(), MetadataSchemaEnum.RELATION.getName())) { - itemService.addMetadata(c, item, dcv.getSchema(), - dcv.getElement(), - dcv.getQualifier(), - dcv.getLanguage(), - dcv.getValue(), - dcv.getAuthority(), - dcv.getConfidence()); - } - } - //Add relations after all metadata has been processed - for (BulkEditMetadataValue dcv : whatHasChanged.getAdds()) { - if (StringUtils.equals(dcv.getSchema(), MetadataSchemaEnum.RELATION.getName())) { - addRelationship(c, item, dcv.getElement(), dcv.getValue()); - } - } - - - // Should the workflow be used? - if (useWorkflow) { - WorkflowService workflowService = WorkflowServiceFactory.getInstance().getWorkflowService(); - if (workflowNotify) { - wfItem = workflowService.start(c, wsItem); - } else { - wfItem = workflowService.startWithoutNotify(c, wsItem); - } - } else { - // Install the item - installItemService.installItem(c, wsItem); - } - - // Add to extra collections - if (line.get("collection").size() > 0) { - for (int i = 1; i < collections.size(); i++) { - String handle = collections.get(i); - Collection extra = (Collection) handleService.resolveToObject(c, handle); - collectionService.addItem(c, extra, item); - } - } - - whatHasChanged.setItem(item); - } - - // Record the changes - changes.add(whatHasChanged); - } - - if (change) { - //only clear cache if changes have been made. - c.uncacheEntity(wsItem); - c.uncacheEntity(wfItem); - c.uncacheEntity(item); - } - populateRefAndRowMap(line, item == null ? null : item.getID()); - // keep track of current rows processed - rowCount++; + // Is there an action column? + if (csv.hasActions() && (!"".equals(line.getAction())) && (id == null)) { + throw new MetadataImportException("'action' not allowed for new items!"); } - c.setMode(originalMode); - } catch (MetadataImportException mie) { - throw mie; - } catch (Exception e) { - e.printStackTrace(); + WorkspaceItem wsItem = null; + WorkflowItem wfItem = null; + Item item = null; + + // Is this an existing item? + if (id != null) { + // Get the item + item = itemService.find(c, id); + if (item == null) { + throw new MetadataImportException("Unknown item ID " + id); + } + + // Record changes + BulkEditChange whatHasChanged = new BulkEditChange(item); + + // Has it moved collection? 
+ List collections = line.get("collection"); + if (collections != null) { + // Sanity check we're not orphaning it + if (collections.size() == 0) { + throw new MetadataImportException("Missing collection from item " + item.getHandle()); + } + List actualCollections = item.getCollections(); + compare(c, item, collections, actualCollections, whatHasChanged, change); + } + + // Iterate through each metadata element in the csv line + for (String md : line.keys()) { + // Get the values we already have + if (!"id".equals(md)) { + // Get the values from the CSV + String[] fromCSV = line.get(md).toArray(new String[line.get(md).size()]); + // Remove authority unless the md is not authority controlled + if (!isAuthorityControlledField(md)) { + for (int i = 0; i < fromCSV.length; i++) { + int pos = fromCSV[i].indexOf(csv.getAuthoritySeparator()); + if (pos > -1) { + fromCSV[i] = fromCSV[i].substring(0, pos); + } + } + } + // Compare + compareAndUpdate(c, item, fromCSV, change, md, whatHasChanged, line); + } + } + + if (csv.hasActions()) { + // Perform the action + String action = line.getAction(); + if ("".equals(action)) { + // Do nothing + } else if ("expunge".equals(action)) { + // Does the configuration allow deletes? + if (!ConfigurationManager.getBooleanProperty("bulkedit", "allowexpunge", false)) { + throw new MetadataImportException("'expunge' action denied by configuration"); + } + + // Remove the item + + if (change) { + itemService.delete(c, item); + } + + whatHasChanged.setDeleted(); + } else if ("withdraw".equals(action)) { + // Withdraw the item + if (!item.isWithdrawn()) { + if (change) { + itemService.withdraw(c, item); + } + whatHasChanged.setWithdrawn(); + } + } else if ("reinstate".equals(action)) { + // Reinstate the item + if (item.isWithdrawn()) { + if (change) { + itemService.reinstate(c, item); + } + whatHasChanged.setReinstated(); + } + } else { + // Unknown action! 
+ throw new MetadataImportException("Unknown action: " + action); + } + } + + // Only record if changes have been made + if (whatHasChanged.hasChanges()) { + changes.add(whatHasChanged); + } + } else { + // This is marked as a new item, so no need to compare + + // First check a user is set, otherwise this can't happen + if (c.getCurrentUser() == null) { + throw new MetadataImportException( + "When adding new items, a user must be specified with the -e option"); + } + + // Iterate through each metadata element in the csv line + BulkEditChange whatHasChanged = new BulkEditChange(); + for (String md : line.keys()) { + // Get the values we already have + if (!"id".equals(md) && !"rowName".equals(md)) { + // Get the values from the CSV + String[] fromCSV = line.get(md).toArray(new String[line.get(md).size()]); + + // Remove authority unless the md is not authority controlled + if (!isAuthorityControlledField(md)) { + for (int i = 0; i < fromCSV.length; i++) { + int pos = fromCSV[i].indexOf(csv.getAuthoritySeparator()); + if (pos > -1) { + fromCSV[i] = fromCSV[i].substring(0, pos); + } + } + } + + // Add all the values from the CSV line + add(c, fromCSV, md, whatHasChanged); + } + } + + // Check it has an owning collection + List collections = line.get("collection"); + if (collections == null) { + throw new MetadataImportException( + "New items must have a 'collection' assigned in the form of a handle"); + } + + // Check collections are really collections + ArrayList check = new ArrayList(); + Collection collection; + for (String handle : collections) { + try { + // Resolve the handle to the collection + collection = (Collection) handleService.resolveToObject(c, handle); + + // Check it resolved OK + if (collection == null) { + throw new MetadataImportException( + "'" + handle + "' is not a Collection! You must specify a valid collection for " + + "new items"); + } + + // Check for duplicate + if (check.contains(collection)) { + throw new MetadataImportException( + "Duplicate collection assignment detected in new item! " + handle); + } else { + check.add(collection); + } + } catch (Exception ex) { + throw new MetadataImportException( + "'" + handle + "' is not a Collection! You must specify a valid collection for new " + + "items", + ex); + } + } + + // Record the addition to collections + boolean first = true; + for (String handle : collections) { + Collection extra = (Collection) handleService.resolveToObject(c, handle); + if (first) { + whatHasChanged.setOwningCollection(extra); + } else { + whatHasChanged.registerNewMappedCollection(extra); + } + first = false; + } + + // Create the new item? 
+ if (change) { + // Create the item + String collectionHandle = line.get("collection").get(0); + collection = (Collection) handleService.resolveToObject(c, collectionHandle); + wsItem = workspaceItemService.create(c, collection, useTemplate); + item = wsItem.getItem(); + + // Add the metadata to the item + for (BulkEditMetadataValue dcv : whatHasChanged.getAdds()) { + if (!StringUtils.equals(dcv.getSchema(), MetadataSchemaEnum.RELATION.getName())) { + itemService.addMetadata(c, item, dcv.getSchema(), + dcv.getElement(), + dcv.getQualifier(), + dcv.getLanguage(), + dcv.getValue(), + dcv.getAuthority(), + dcv.getConfidence()); + } + } + //Add relations after all metadata has been processed + for (BulkEditMetadataValue dcv : whatHasChanged.getAdds()) { + if (StringUtils.equals(dcv.getSchema(), MetadataSchemaEnum.RELATION.getName())) { + addRelationship(c, item, dcv.getElement(), dcv.getValue()); + } + } + + + // Should the workflow be used? + if (useWorkflow) { + WorkflowService workflowService = WorkflowServiceFactory.getInstance().getWorkflowService(); + if (workflowNotify) { + wfItem = workflowService.start(c, wsItem); + } else { + wfItem = workflowService.startWithoutNotify(c, wsItem); + } + } else { + // Install the item + installItemService.installItem(c, wsItem); + } + + // Add to extra collections + if (line.get("collection").size() > 0) { + for (int i = 1; i < collections.size(); i++) { + String handle = collections.get(i); + Collection extra = (Collection) handleService.resolveToObject(c, handle); + collectionService.addItem(c, extra, item); + } + } + + whatHasChanged.setItem(item); + } + + // Record the changes + changes.add(whatHasChanged); + } + + if (change) { + //only clear cache if changes have been made. + c.uncacheEntity(wsItem); + c.uncacheEntity(wfItem); + c.uncacheEntity(item); + } + populateRefAndRowMap(line, item == null ? 
null : item.getID()); + // keep track of current rows processed + rowCount++; } + c.setMode(originalMode); + + // Return the changes - if (!change ) { - validateExpressedRelations(); + if (!change) { + validateExpressedRelations(c); } return changes; } @@ -487,7 +628,7 @@ public class MetadataImport { * @throws AuthorizeException if there is an authorization problem with permissions * @throws MetadataImportException custom exception for error handling within metadataimport */ - protected void compareAndUpdate(Item item, String[] fromCSV, boolean change, + protected void compareAndUpdate(Context c, Item item, String[] fromCSV, boolean change, String md, BulkEditChange changes, DSpaceCSVLine line) throws SQLException, AuthorizeException, MetadataImportException { // Log what metadata element we're looking at @@ -565,7 +706,7 @@ public class MetadataImport { // Compare from current->csv for (int v = 0; v < fromCSV.length; v++) { String value = fromCSV[v]; - BulkEditMetadataValue dcv = getBulkEditValueFromCSV(language, schema, element, qualifier, value, + BulkEditMetadataValue dcv = getBulkEditValueFromCSV(c, language, schema, element, qualifier, value, fromAuthority); if (fromAuthority != null) { value = dcv.getValue() + csv.getAuthoritySeparator() + dcv.getAuthority() + csv @@ -694,8 +835,8 @@ public class MetadataImport { * @throws AuthorizeException If something goes wrong */ private void addRelationships(Context c, Item item, String typeName, List values) - throws SQLException, AuthorizeException, - MetadataImportException { + throws SQLException, AuthorizeException, + MetadataImportException { for (String value : values) { addRelationship(c, item, typeName, value); } @@ -746,22 +887,23 @@ public class MetadataImport { Entity relationEntity = getEntity(c, value); // Get relationship type of entity and item String relationEntityRelationshipType = itemService.getMetadata(relationEntity.getItem(), - "relationship", "type", - null, Item.ANY).get(0).getValue(); + "relationship", "type", + null, Item.ANY).get(0).getValue(); String itemRelationshipType = itemService.getMetadata(item, "relationship", "type", - null, Item.ANY).get(0).getValue(); + null, Item.ANY).get(0).getValue(); // Get the correct RelationshipType based on typeName List relType = relationshipTypeService.findByLeftwardOrRightwardTypeName(c, typeName); RelationshipType foundRelationshipType = matchRelationshipType(relType, - relationEntityRelationshipType, itemRelationshipType, typeName); + relationEntityRelationshipType, + itemRelationshipType, typeName); if (foundRelationshipType == null) { throw new MetadataImportException("Error on CSV row " + rowCount + ":" + "\n" + - "No Relationship type found for:\n" + - "Target type: " + relationEntityRelationshipType + "\n" + - "Origin referer type: " + itemRelationshipType + "\n" + - "with typeName: " + typeName); + "No Relationship type found for:\n" + + "Target type: " + relationEntityRelationshipType + "\n" + + "Origin referer type: " + itemRelationshipType + "\n" + + "with typeName: " + typeName); } if (foundRelationshipType.getLeftwardType().equalsIgnoreCase(typeName)) { @@ -783,7 +925,7 @@ public class MetadataImport { int leftPlace = relationshipService.findNextLeftPlaceByLeftItem(c, leftItem); int rightPlace = relationshipService.findNextRightPlaceByRightItem(c, rightItem); Relationship persistedRelationship = relationshipService.create(c, leftItem, rightItem, - foundRelationshipType, leftPlace, rightPlace); + foundRelationshipType, leftPlace, rightPlace); 
relationshipService.update(c, persistedRelationship); } @@ -801,7 +943,7 @@ public class MetadataImport { * @throws IOException Can be thrown when moving items in communities * @throws MetadataImportException If something goes wrong to be reported back to the user */ - protected void compare(Item item, + protected void compare(Context c, Item item, List collections, List actualCollections, BulkEditChange bechange, @@ -898,8 +1040,8 @@ public class MetadataImport { // Remove from old owned collection (if still a member) if (bechange.getOldOwningCollection() != null) { boolean found = false; - for (Collection c : item.getCollections()) { - if (c.getID().equals(bechange.getOldOwningCollection().getID())) { + for (Collection collection : item.getCollections()) { + if (collection.getID().equals(bechange.getOldOwningCollection().getID())) { found = true; } } @@ -926,7 +1068,7 @@ public class MetadataImport { * @throws SQLException when an SQL error has occurred (querying DSpace) * @throws AuthorizeException If the user can't make the changes */ - protected void add(String[] fromCSV, String md, BulkEditChange changes) + protected void add(Context c, String[] fromCSV, String md, BulkEditChange changes) throws SQLException, AuthorizeException { // Don't add owning collection or action if (("collection".equals(md)) || ("action".equals(md))) { @@ -964,7 +1106,7 @@ public class MetadataImport { // Add all the values for (String value : fromCSV) { - BulkEditMetadataValue dcv = getBulkEditValueFromCSV(language, schema, element, qualifier, value, + BulkEditMetadataValue dcv = getBulkEditValueFromCSV(c, language, schema, element, qualifier, value, fromAuthority); if (fromAuthority != null) { value = dcv.getValue() + csv.getAuthoritySeparator() + dcv.getAuthority() + csv @@ -978,7 +1120,7 @@ public class MetadataImport { } } - protected BulkEditMetadataValue getBulkEditValueFromCSV(String language, String schema, String element, + protected BulkEditMetadataValue getBulkEditValueFromCSV(Context c, String language, String schema, String element, String qualifier, String value, AuthorityValue fromAuthority) { // Look to see if it should be removed @@ -1057,20 +1199,6 @@ public class MetadataImport { return in.replaceAll("\r\n", "").replaceAll("\n", "").trim(); } - /** - * Print the help message - * - * @param options The command line options the user gave - * @param exitCode the system exit code to use - */ - private static void printHelp(Options options, int exitCode) { - // print the help message - HelpFormatter myhelp = new HelpFormatter(); - myhelp.printHelp("MetatadataImport\n", options); - System.out.println("\nmetadataimport: MetadataImport -f filename"); - System.exit(exitCode); - } - /** * Display the changes that have been detected, or that have been made * @@ -1078,7 +1206,7 @@ public class MetadataImport { * @param changed Whether or not the changes have been made * @return The number of items that have changed */ - private static int displayChanges(List changes, boolean changed) { + private int displayChanges(List changes, boolean changed) { // Display the changes int changeCounter = 0; for (BulkEditChange change : changes) { @@ -1093,20 +1221,18 @@ public class MetadataImport { (change.isDeleted()) || (change.isWithdrawn()) || (change.isReinstated())) { // Show the item Item i = change.getItem(); - - System.out.println("-----------------------------------------------------------"); + handler.logInfo("-----------------------------------------------------------"); if (!change.isNewItem()) { - 
System.out.println("Changes for item: " + i.getID() + " (" + i.getHandle() + ")"); + handler.logInfo("Changes for item: " + i.getID() + " (" + i.getHandle() + ")"); } else { - System.out.print("New item: "); + handler.logInfo("New item: "); if (i != null) { if (i.getHandle() != null) { - System.out.print(i.getID() + " (" + i.getHandle() + ")"); + handler.logInfo(i.getID() + " (" + i.getHandle() + ")"); } else { - System.out.print(i.getID() + " (in workflow)"); + handler.logInfo(i.getID() + " (in workflow)"); } } - System.out.println(); } changeCounter++; } @@ -1114,23 +1240,23 @@ public class MetadataImport { // Show actions if (change.isDeleted()) { if (changed) { - System.out.println(" - EXPUNGED!"); + handler.logInfo(" - EXPUNGED!"); } else { - System.out.println(" - EXPUNGE!"); + handler.logInfo(" - EXPUNGE!"); } } if (change.isWithdrawn()) { if (changed) { - System.out.println(" - WITHDRAWN!"); + handler.logInfo(" - WITHDRAWN!"); } else { - System.out.println(" - WITHDRAW!"); + handler.logInfo(" - WITHDRAW!"); } } if (change.isReinstated()) { if (changed) { - System.out.println(" - REINSTATED!"); + handler.logInfo(" - REINSTATED!"); } else { - System.out.println(" - REINSTATE!"); + handler.logInfo(" - REINSTATE!"); } } @@ -1140,11 +1266,11 @@ public class MetadataImport { String cHandle = c.getHandle(); String cName = c.getName(); if (!changed) { - System.out.print(" + New owning collection (" + cHandle + "): "); + handler.logInfo(" + New owning collection (" + cHandle + "): "); } else { - System.out.print(" + New owning collection (" + cHandle + "): "); + handler.logInfo(" + New owning collection (" + cHandle + "): "); } - System.out.println(cName); + handler.logInfo(cName); } c = change.getOldOwningCollection(); @@ -1152,11 +1278,11 @@ public class MetadataImport { String cHandle = c.getHandle(); String cName = c.getName(); if (!changed) { - System.out.print(" + Old owning collection (" + cHandle + "): "); + handler.logInfo(" + Old owning collection (" + cHandle + "): "); } else { - System.out.print(" + Old owning collection (" + cHandle + "): "); + handler.logInfo(" + Old owning collection (" + cHandle + "): "); } - System.out.println(cName); + handler.logInfo(cName); } } @@ -1165,11 +1291,11 @@ public class MetadataImport { String cHandle = c.getHandle(); String cName = c.getName(); if (!changed) { - System.out.print(" + Map to collection (" + cHandle + "): "); + handler.logInfo(" + Map to collection (" + cHandle + "): "); } else { - System.out.print(" + Mapped to collection (" + cHandle + "): "); + handler.logInfo(" + Mapped to collection (" + cHandle + "): "); } - System.out.println(cName); + handler.logInfo(cName); } // Show old mapped collections @@ -1177,11 +1303,11 @@ public class MetadataImport { String cHandle = c.getHandle(); String cName = c.getName(); if (!changed) { - System.out.print(" + Un-map from collection (" + cHandle + "): "); + handler.logInfo(" + Un-map from collection (" + cHandle + "): "); } else { - System.out.print(" + Un-mapped from collection (" + cHandle + "): "); + handler.logInfo(" + Un-mapped from collection (" + cHandle + "): "); } - System.out.println(cName); + handler.logInfo(cName); } // Show additions @@ -1194,16 +1320,15 @@ public class MetadataImport { md += "[" + metadataValue.getLanguage() + "]"; } if (!changed) { - System.out.print(" + Add (" + md + "): "); + handler.logInfo(" + Add (" + md + "): "); } else { - System.out.print(" + Added (" + md + "): "); + handler.logInfo(" + Added (" + md + "): "); } - 
System.out.print(metadataValue.getValue()); + handler.logInfo(metadataValue.getValue()); if (isAuthorityControlledField(md)) { - System.out.print(", authority = " + metadataValue.getAuthority()); - System.out.print(", confidence = " + metadataValue.getConfidence()); + handler.logInfo(", authority = " + metadataValue.getAuthority()); + handler.logInfo(", confidence = " + metadataValue.getConfidence()); } - System.out.println(""); } // Show removals @@ -1216,16 +1341,15 @@ public class MetadataImport { md += "[" + metadataValue.getLanguage() + "]"; } if (!changed) { - System.out.print(" - Remove (" + md + "): "); + handler.logInfo(" - Remove (" + md + "): "); } else { - System.out.print(" - Removed (" + md + "): "); + handler.logInfo(" - Removed (" + md + "): "); } - System.out.print(metadataValue.getValue()); + handler.logInfo(metadataValue.getValue()); if (isAuthorityControlledField(md)) { - System.out.print(", authority = " + metadataValue.getAuthority()); - System.out.print(", confidence = " + metadataValue.getConfidence()); + handler.logInfo(", authority = " + metadataValue.getAuthority()); + handler.logInfo(", confidence = " + metadataValue.getConfidence()); } - System.out.println(""); } } return changeCounter; @@ -1243,7 +1367,7 @@ public class MetadataImport { /** * Set authority controlled fields */ - private static void setAuthorizedMetadataFields() { + private void setAuthorizedMetadataFields() { authorityControlled = new HashSet(); Enumeration propertyNames = ConfigurationManager.getProperties().propertyNames(); while (propertyNames.hasMoreElements()) { @@ -1255,191 +1379,6 @@ public class MetadataImport { } } - /** - * main method to run the metadata exporter - * - * @param argv the command line arguments given - */ - public static void main(String[] argv) { - // Create an options object and populate it - CommandLineParser parser = new PosixParser(); - - Options options = new Options(); - - options.addOption("f", "file", true, "source file"); - options.addOption("e", "email", true, "email address or user id of user (required if adding new items)"); - options.addOption("s", "silent", false, - "silent operation - doesn't request confirmation of changes USE WITH CAUTION"); - options.addOption("w", "workflow", false, "workflow - when adding new items, use collection workflow"); - options.addOption("n", "notify", false, - "notify - when adding new items using a workflow, send notification emails"); - options.addOption("t", "template", false, - "template - when adding new items, use the collection template (if it exists)"); - options.addOption("v", "validate-only", false, - "validate - just validate the csv, don't run the import"); - options.addOption("h", "help", false, "help"); - - // Parse the command line arguments - CommandLine line; - try { - line = parser.parse(options, argv); - } catch (ParseException pe) { - System.err.println("Error parsing command line arguments: " + pe.getMessage()); - System.exit(1); - return; - } - - if (line.hasOption('h')) { - printHelp(options, 0); - } - - // Check a filename is given - if (!line.hasOption('f')) { - System.err.println("Required parameter -f missing!"); - printHelp(options, 1); - } - String filename = line.getOptionValue('f'); - - // Option to apply template to new items - boolean useTemplate = false; - if (line.hasOption('t')) { - useTemplate = true; - } - - // Options for workflows, and workflow notifications for new items - boolean useWorkflow = false; - boolean workflowNotify = false; - if (line.hasOption('w')) { - useWorkflow = 
true; - if (line.hasOption('n')) { - workflowNotify = true; - } - } else if (line.hasOption('n')) { - System.err.println("Invalid option 'n': (notify) can only be specified with the 'w' (workflow) option."); - System.exit(1); - } - - // Create a context - Context c; - try { - c = new Context(); - c.turnOffAuthorisationSystem(); - } catch (Exception e) { - System.err.println("Unable to create a new DSpace Context: " + e.getMessage()); - System.exit(1); - return; - } - - // Find the EPerson, assign to context - try { - if (line.hasOption('e')) { - EPerson eperson; - String e = line.getOptionValue('e'); - if (e.indexOf('@') != -1) { - eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(c, e); - } else { - eperson = EPersonServiceFactory.getInstance().getEPersonService().find(c, UUID.fromString(e)); - } - - if (eperson == null) { - System.out.println("Error, eperson cannot be found: " + e); - System.exit(1); - } - c.setCurrentUser(eperson); - } - } catch (Exception e) { - System.err.println("Unable to find DSpace user: " + e.getMessage()); - System.exit(1); - return; - } - - // Is this a silent run? - boolean change = false; - - // Read lines from the CSV file - DSpaceCSV csv; - try { - csv = new DSpaceCSV(new File(filename), c); - } catch (MetadataImportInvalidHeadingException miihe) { - System.err.println(miihe.getMessage()); - System.exit(1); - return; - } catch (Exception e) { - System.err.println("Error reading file: " + e.getMessage()); - System.exit(1); - return; - } - - // Perform the first import - just highlight differences - MetadataImport importer = new MetadataImport(c, csv); - List changes; - - boolean validateOnly = line.hasOption('v'); - - if (!line.hasOption('s') || validateOnly) { - // See what has changed - try { - changes = importer.runImport(false, useWorkflow, workflowNotify, useTemplate); - } catch (MetadataImportException mie) { - System.err.println("Error: " + mie.getMessage()); - System.exit(1); - return; - } - - // Display the changes - int changeCounter = displayChanges(changes, false); - - // If there were changes, ask if we should execute them - if (!validateOnly && changeCounter > 0) { - try { - // Ask the user if they want to make the changes - System.out.println("\n" + changeCounter + " item(s) will be changed\n"); - System.out.print("Do you want to make these changes? 
[y/n] "); - String yn = (new BufferedReader(new InputStreamReader(System.in))).readLine(); - if ("y".equalsIgnoreCase(yn)) { - change = true; - } else { - System.out.println("No data has been changed."); - } - } catch (IOException ioe) { - System.err.println("Error: " + ioe.getMessage()); - System.err.println("No changes have been made"); - System.exit(1); - } - } else { - System.out.println("There were no changes detected"); - } - } else { - change = true; - } - - try { - // If required, make the change - if (change && !validateOnly) { - try { - // Make the changes - changes = importer.runImport(true, useWorkflow, workflowNotify, useTemplate); - } catch (MetadataImportException mie) { - System.err.println("Error: " + mie.getMessage()); - System.exit(1); - return; - } - - // Display the changes - displayChanges(changes, true); - } - - // Finsh off and tidy up - c.restoreAuthSystemState(); - c.complete(); - } catch (Exception e) { - c.abort(); - System.err.println("Error committing changes to database: " + e.getMessage()); - System.err.println("Aborting most recent changes."); - System.exit(1); - } - } - /** * Gets a copy of the given csv line with all entity target references resolved to UUID strings. * Keys being iterated over represent metadatafields or special columns to be processed. @@ -1448,7 +1387,7 @@ public class MetadataImport { * @return a copy, with all references resolved. * @throws MetadataImportException if there is an error resolving any entity target reference. */ - public DSpaceCSVLine resolveEntityRefs(DSpaceCSVLine line) throws MetadataImportException { + public DSpaceCSVLine resolveEntityRefs(Context c, DSpaceCSVLine line) throws MetadataImportException { DSpaceCSVLine newLine = new DSpaceCSVLine(line.getID()); UUID originId = evaluateOriginId(line.getID()); for (String key : line.keys()) { @@ -1503,7 +1442,7 @@ public class MetadataImport { originIds.add(originId); typeNames.put(relationField, originIds); } else { - ArrayList originIds = typeNames.get(relationField); + ArrayList originIds = typeNames.get(relationField); originIds.add(originId); typeNames.put(relationField, originIds); } @@ -1533,7 +1472,7 @@ public class MetadataImport { } for (String key : line.keys()) { if (key.contains(".") && !key.split("\\.")[0].equalsIgnoreCase("relation") || - key.equalsIgnoreCase("rowName")) { + key.equalsIgnoreCase("rowName")) { for (String value : line.get(key)) { String valueKey = key + ":" + value; Set rowNums = csvRefMap.get(valueKey); @@ -1575,20 +1514,20 @@ public class MetadataImport { try { return UUID.fromString(reference); } catch (IllegalArgumentException e) { - throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" + - "Not a UUID or indirect entity reference: '" + reference + "'"); + throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" + + "Not a UUID or indirect entity reference: '" + reference + "'"); } - } else if (!reference.startsWith("rowName:") ) { // Not a rowName ref; so it's a metadata value reference + } else if (!reference.startsWith("rowName:")) { // Not a rowName ref; so it's a metadata value reference MetadataValueService metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService(); MetadataFieldService metadataFieldService = - ContentServiceFactory.getInstance().getMetadataFieldService(); + ContentServiceFactory.getInstance().getMetadataFieldService(); int i = reference.indexOf(":"); String mfValue = reference.substring(i + 1); String mf[] = reference.substring(0, i).split("\\."); if 
(mf.length < 2) { - throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" + - "Bad metadata field in reference: '" + reference - + "' (expected syntax is schema.element[.qualifier])"); + throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" + + "Bad metadata field in reference: '" + reference + + "' (expected syntax is schema.element[.qualifier])"); } String schema = mf[0]; String element = mf[1]; @@ -1600,13 +1539,13 @@ public class MetadataImport { MetadataValue mdvVal = mdv.next(); uuid = mdvVal.getDSpaceObject().getID(); if (mdv.hasNext()) { - throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" + - "Ambiguous reference; multiple matches in db: " + reference); + throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" + + "Ambiguous reference; multiple matches in db: " + reference); } } } catch (SQLException e) { - throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" + - "Error looking up item by metadata reference: " + reference, e); + throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" + + "Error looking up item by metadata reference: " + reference, e); } } // Lookup UUIDs that may have already been processed into the csvRefMap @@ -1614,24 +1553,25 @@ public class MetadataImport { // See getMatchingCSVUUIDs() for how the reference param is sourced from the csvRefMap Set csvUUIDs = getMatchingCSVUUIDs(reference); if (csvUUIDs.size() > 1) { - throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" + - "Ambiguous reference; multiple matches in csv: " + reference); + throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" + + "Ambiguous reference; multiple matches in csv: " + reference); } else if (csvUUIDs.size() == 1) { UUID csvUUID = csvUUIDs.iterator().next(); if (csvUUID.equals(uuid)) { return uuid; // one match from csv and db (same item) } else if (uuid != null) { - throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" + - "Ambiguous reference; multiple matches in db and csv: " + reference); + throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" + + "Ambiguous reference; multiple matches in db and csv: " + reference); } else { return csvUUID; // one match from csv } } else { // size == 0; the reference does not exist throw an error if (uuid == null) { - throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" + - "No matches found for reference: " + reference - + "\nKeep in mind you can only reference entries that are listed before " + - "this one within the CSV."); + throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" + + "No matches found for reference: " + reference + + "\nKeep in mind you can only reference entries that are " + + "listed before " + + "this one within the CSV."); } else { return uuid; // one match from db } @@ -1688,14 +1628,16 @@ public class MetadataImport { * Validate every relation modification expressed in the CSV. * */ - private void validateExpressedRelations() throws MetadataImportException { + private void validateExpressedRelations(Context c) throws MetadataImportException { for (String targetUUID : entityRelationMap.keySet()) { String targetType = null; try { // Get the type of reference. Attempt lookup in processed map first before looking in archive. if (entityTypeMap.get(UUID.fromString(targetUUID)) != null) { targetType = entityTypeService. 
- findByEntityType(c, entityTypeMap.get(UUID.fromString(targetUUID))).getLabel(); + findByEntityType(c, + entityTypeMap.get(UUID.fromString(targetUUID))) + .getLabel(); } else { // Target item may be archived; check there. // Add to errors if Realtionship.type cannot be derived @@ -1703,18 +1645,19 @@ public class MetadataImport { if (itemService.find(c, UUID.fromString(targetUUID)) != null) { targetItem = itemService.find(c, UUID.fromString(targetUUID)); List relTypes = itemService. - getMetadata(targetItem, "relationship", "type", null, Item.ANY); + getMetadata(targetItem, "relationship", "type", + null, Item.ANY); String relTypeValue = null; if (relTypes.size() > 0) { relTypeValue = relTypes.get(0).getValue(); targetType = entityTypeService.findByEntityType(c, relTypeValue).getLabel(); } else { relationValidationErrors.add("Cannot resolve Entity type for target UUID: " + - targetUUID); + targetUUID); } } else { relationValidationErrors.add("Cannot resolve Entity type for target UUID: " + - targetUUID); + targetUUID); } } if (targetType == null) { @@ -1739,7 +1682,7 @@ public class MetadataImport { // Attempt lookup in processed map first before looking in archive. if (entityTypeMap.get(UUID.fromString(originRefererUUID)) != null) { originType = entityTypeMap.get(UUID.fromString(originRefererUUID)); - validateTypesByTypeByTypeName(targetType, originType, typeName, originRow); + validateTypesByTypeByTypeName(c, targetType, originType, typeName, originRow); } else { // Origin item may be archived; check there. // Add to errors if Realtionship.type cannot be derived. @@ -1747,22 +1690,23 @@ public class MetadataImport { if (itemService.find(c, UUID.fromString(targetUUID)) != null) { originItem = itemService.find(c, UUID.fromString(originRefererUUID)); List relTypes = itemService. - getMetadata(originItem, "relationship", "type", null, Item.ANY); + getMetadata(originItem, "relationship", + "type", null, Item.ANY); String relTypeValue = null; if (relTypes.size() > 0) { relTypeValue = relTypes.get(0).getValue(); originType = entityTypeService.findByEntityType(c, relTypeValue).getLabel(); - validateTypesByTypeByTypeName(targetType, originType, typeName, originRow); + validateTypesByTypeByTypeName(c, targetType, originType, typeName, originRow); } else { relationValidationErrors.add("Error on CSV row " + originRow + ":" + "\n" + - "Cannot resolve Entity type for reference: " - + originRefererUUID); + "Cannot resolve Entity type for reference: " + + originRefererUUID); } } else { relationValidationErrors.add("Error on CSV row " + originRow + ":" + "\n" + - "Cannot resolve Entity type for reference: " - + originRefererUUID + " in row: " + originRow ); + "Cannot resolve Entity type for reference: " + + originRefererUUID + " in row: " + originRow); } } } @@ -1791,20 +1735,22 @@ public class MetadataImport { * @param typeName left or right typeName of the respective Relationship. * @return the UUID of the item. */ - private void validateTypesByTypeByTypeName(String targetType, String originType, String typeName, String originRow) - throws MetadataImportException { + private void validateTypesByTypeByTypeName(Context c, + String targetType, String originType, String typeName, String originRow) + throws MetadataImportException { try { RelationshipType foundRelationshipType = null; List relationshipTypeList = relationshipTypeService. 
- findByLeftwardOrRightwardTypeName(c, typeName.split("\\.")[1]); + findByLeftwardOrRightwardTypeName( + c, typeName.split("\\.")[1]); // Validate described relationship form the CSV. foundRelationshipType = matchRelationshipType(relationshipTypeList, targetType, originType, typeName); if (foundRelationshipType == null) { relationValidationErrors.add("Error on CSV row " + originRow + ":" + "\n" + - "No Relationship type found for:\n" + - "Target type: " + targetType + "\n" + - "Origin referer type: " + originType + "\n" + - "with typeName: " + typeName + " for type: " + originType); + "No Relationship type found for:\n" + + "Target type: " + targetType + "\n" + + "Origin referer type: " + originType + "\n" + + "with typeName: " + typeName + " for type: " + originType); } } catch (SQLException sqle) { throw new MetadataImportException("Error interacting with database!", sqle); @@ -1837,7 +1783,7 @@ public class MetadataImport { continue; } if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(originType) && - relationshipType.getRightType().getLabel().equalsIgnoreCase(targetType)) { + relationshipType.getRightType().getLabel().equalsIgnoreCase(targetType)) { foundRelationshipType = relationshipType; } } else { @@ -1845,7 +1791,7 @@ public class MetadataImport { continue; } if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(targetType) && - relationshipType.getRightType().getLabel().equalsIgnoreCase(originType)) { + relationshipType.getRightType().getLabel().equalsIgnoreCase(originType)) { foundRelationshipType = relationshipType; } } @@ -1853,4 +1799,4 @@ public class MetadataImport { return foundRelationshipType; } -} +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportCLI.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportCLI.java new file mode 100644 index 0000000000..c319c26971 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportCLI.java @@ -0,0 +1,68 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkedit; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.util.UUID; + +import org.apache.commons.cli.ParseException; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.scripts.handler.DSpaceRunnableHandler; + +/** + * CLI variant for the {@link MetadataImport} class + * This has been made so that we can specify the behaviour of the determineChanges method to be specific for the CLI + */ +public class MetadataImportCLI extends MetadataImport { + + @Override + protected boolean determineChange(DSpaceRunnableHandler handler) throws IOException { + handler.logInfo("Do you want to make these changes? 
[y/n] "); + try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(System.in))) { + String yn = bufferedReader.readLine(); + if ("y".equalsIgnoreCase(yn)) { + return true; + } + return false; + } + } + + @Override + protected void assignCurrentUserInContext(Context context) throws ParseException { + try { + if (commandLine.hasOption('e')) { + EPerson eperson; + String e = commandLine.getOptionValue('e'); + if (e.indexOf('@') != -1) { + eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, e); + } else { + eperson = EPersonServiceFactory.getInstance().getEPersonService().find(context, UUID.fromString(e)); + } + + if (eperson == null) { + throw new ParseException("Error, eperson cannot be found: " + e); + } + context.setCurrentUser(eperson); + } + } catch (Exception e) { + throw new ParseException("Unable to find DSpace user: " + e.getMessage()); + } + } + + @Override + public void setup() throws ParseException { + super.setup(); + if (!commandLine.hasOption('e')) { + throw new ParseException("Required parameter -e missing!"); + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportCliScriptConfiguration.java new file mode 100644 index 0000000000..038df616ca --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportCliScriptConfiguration.java @@ -0,0 +1,27 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkedit; + +import org.apache.commons.cli.Options; +import org.dspace.scripts.configuration.ScriptConfiguration; + +/** + * The {@link ScriptConfiguration} for the {@link org.dspace.app.bulkedit.MetadataImportCLI} CLI script + */ +public class MetadataImportCliScriptConfiguration extends MetadataImportScriptConfiguration { + + @Override + public Options getOptions() { + Options options = super.getOptions(); + options.addOption("e", "email", true, "email address or user id of user (required if adding new items)"); + options.getOption("e").setType(String.class); + options.getOption("e").setRequired(true); + super.options = options; + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java new file mode 100644 index 0000000000..07e6a9aec9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java @@ -0,0 +1,81 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkedit; + +import java.io.InputStream; +import java.sql.SQLException; + +import org.apache.commons.cli.Options; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.core.Context; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * The {@link ScriptConfiguration} for the {@link MetadataImport} script + */ +public class MetadataImportScriptConfiguration extends ScriptConfiguration { + + @Autowired + private AuthorizeService 
authorizeService; + + private Class dspaceRunnableClass; + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + /** + * Generic setter for the dspaceRunnableClass + * @param dspaceRunnableClass The dspaceRunnableClass to be set on this MetadataImportScriptConfiguration + */ + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public boolean isAllowedToExecute(Context context) { + try { + return authorizeService.isAdmin(context); + } catch (SQLException e) { + throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + } + } + + @Override + public Options getOptions() { + if (options == null) { + Options options = new Options(); + + options.addOption("f", "file", true, "source file"); + options.getOption("f").setType(InputStream.class); + options.getOption("f").setRequired(true); + options.addOption("s", "silent", false, + "silent operation - doesn't request confirmation of changes USE WITH CAUTION"); + options.getOption("s").setType(boolean.class); + options.addOption("w", "workflow", false, "workflow - when adding new items, use collection workflow"); + options.getOption("w").setType(boolean.class); + options.addOption("n", "notify", false, + "notify - when adding new items using a workflow, send notification emails"); + options.getOption("n").setType(boolean.class); + options.addOption("v", "validate-only", false, + "validate - just validate the csv, don't run the import"); + options.getOption("v").setType(boolean.class); + options.addOption("t", "template", false, + "template - when adding new items, use the collection template (if it exists)"); + options.getOption("t").setType(boolean.class); + options.addOption("h", "help", false, "help"); + options.getOption("h").setType(boolean.class); + + super.options = options; + } + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java index 12fcd84d04..13aa236f54 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java @@ -1519,6 +1519,12 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea if (!dir.exists() && !dir.mkdirs()) { log.error("Unable to create directory: " + dir.getAbsolutePath()); } + // Verify that the directory the entry is using is a subpath of zipDir (and not somewhere else!) + if (!dir.toPath().normalize().startsWith(zipDir)) { + throw new IOException("Bad zip entry: '" + entry.getName() + + "' in file '" + zipfile.getAbsolutePath() + "'!" + + " Cannot process this file."); + } //Entries could have too many directories, and we need to adjust the sourcedir // file1.zip (SimpleArchiveFormat / item1 / contents|dublin_core|... @@ -1539,9 +1545,16 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea } byte[] buffer = new byte[1024]; int len; + File outFile = new File(zipDir + entry.getName()); + // Verify that this file will be created in our zipDir (and not somewhere else!) + if (!outFile.toPath().normalize().startsWith(zipDir)) { + throw new IOException("Bad zip entry: '" + entry.getName() + + "' in file '" + zipfile.getAbsolutePath() + "'!" 
+ + " Cannot process this file."); + } InputStream in = zf.getInputStream(entry); BufferedOutputStream out = new BufferedOutputStream( - new FileOutputStream(zipDir + entry.getName())); + new FileOutputStream(outFile)); while ((len = in.read(buffer)) >= 0) { out.write(buffer, 0, len); } diff --git a/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java b/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java index ef6b0b538e..6ee62bd904 100644 --- a/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java +++ b/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java @@ -16,9 +16,11 @@ import java.util.TreeMap; import org.apache.commons.cli.ParseException; import org.apache.log4j.Logger; import org.dspace.scripts.DSpaceRunnable; +import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.factory.ScriptServiceFactory; import org.dspace.scripts.handler.DSpaceRunnableHandler; import org.dspace.scripts.handler.impl.CommandLineDSpaceRunnableHandler; +import org.dspace.scripts.service.ScriptService; import org.dspace.servicemanager.DSpaceKernelImpl; import org.dspace.servicemanager.DSpaceKernelInit; import org.dspace.services.RequestService; @@ -44,7 +46,8 @@ public class ScriptLauncher { /** * Default constructor */ - private ScriptLauncher() { } + private ScriptLauncher() { + } /** * Execute the DSpace script launcher @@ -54,7 +57,7 @@ public class ScriptLauncher { * @throws FileNotFoundException if file doesn't exist */ public static void main(String[] args) - throws FileNotFoundException, IOException { + throws FileNotFoundException, IOException, IllegalAccessException, InstantiationException { // Initialise the service manager kernel try { kernelImpl = DSpaceKernelInit.getKernel(null); @@ -107,13 +110,18 @@ public class ScriptLauncher { * @param commandConfigs The Document * @param dSpaceRunnableHandler The DSpaceRunnableHandler for this execution * @param kernelImpl The relevant DSpaceKernelImpl - * @return A 1 or 0 depending on whether the script failed or passed respectively + * @return A 1 or 0 depending on whether the script failed or passed respectively */ public static int handleScript(String[] args, Document commandConfigs, - DSpaceRunnableHandler dSpaceRunnableHandler, - DSpaceKernelImpl kernelImpl) { + DSpaceRunnableHandler dSpaceRunnableHandler, + DSpaceKernelImpl kernelImpl) throws InstantiationException, IllegalAccessException { int status; - DSpaceRunnable script = ScriptServiceFactory.getInstance().getScriptService().getScriptForName(args[0]); + ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService(); + ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(args[0]); + DSpaceRunnable script = null; + if (scriptConfiguration != null) { + script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); + } if (script != null) { status = executeScript(args, dSpaceRunnableHandler, script); } else { @@ -127,12 +135,12 @@ public class ScriptLauncher { * @param args The arguments of the script with the script name as first place in the array * @param dSpaceRunnableHandler The relevant DSpaceRunnableHandler * @param script The script to be executed - * @return A 1 or 0 depending on whether the script failed or passed respectively + * @return A 1 or 0 depending on whether the script failed or passed respectively */ private static int executeScript(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler, DSpaceRunnable script) { 
try { - script.initialize(args, dSpaceRunnableHandler); + script.initialize(args, dSpaceRunnableHandler, null); script.run(); return 0; } catch (ParseException e) { diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthorExtractor.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthorExtractor.java index bba0913193..9b66030e90 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthorExtractor.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthorExtractor.java @@ -19,6 +19,15 @@ import org.dspace.core.Context; * @author Andrea Bollini */ public interface RequestItemAuthorExtractor { - public RequestItemAuthor getRequestItemAuthor(Context context, Item item) - throws SQLException; + + /** + * Retrieve the author to contact for a requested copy of the given item. + * + * @param context DSpace context object + * @param item item to request + * @return An object containing the name and email address to send the request to + * or null if no valid email address was found. + * @throws SQLException if database error + */ + public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategy.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategy.java index a5f7341039..5d22efaa7a 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategy.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategy.java @@ -74,8 +74,8 @@ public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy { return new RequestItemAuthor(helpdeskEPerson); } else { String helpdeskName = I18nUtil.getMessage( - "org.dspace.app.requestitem.RequestItemHelpdeskStrategy.helpdeskname", - context); + "org.dspace.app.requestitem.RequestItemHelpdeskStrategy.helpdeskname", + context); return new RequestItemAuthor(helpdeskName, helpDeskEmail); } } diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemMetadataStrategy.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemMetadataStrategy.java index 4d2f78408a..9838e58697 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemMetadataStrategy.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemMetadataStrategy.java @@ -16,6 +16,7 @@ import org.dspace.content.MetadataValue; import org.dspace.content.service.ItemService; import org.dspace.core.Context; import org.dspace.core.I18nUtil; +import org.dspace.services.factory.DSpaceServicesFactory; import org.springframework.beans.factory.annotation.Autowired; /** @@ -38,6 +39,7 @@ public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy { @Override public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException { + RequestItemAuthor author = null; if (emailMetadata != null) { List vals = itemService.getMetadataByMetadataString(item, emailMetadata); if (vals.size() > 0) { @@ -49,19 +51,38 @@ public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy { fullname = nameVals.iterator().next().getValue(); } } - if (StringUtils.isBlank(fullname)) { fullname = I18nUtil - .getMessage( - "org.dspace.app.requestitem.RequestItemMetadataStrategy.unnamed", - context); + .getMessage( + "org.dspace.app.requestitem.RequestItemMetadataStrategy.unnamed", + context); } - RequestItemAuthor author = new
RequestItemAuthor( - fullname, email); + author = new RequestItemAuthor(fullname, email); return author; } + } else { + // Uses the basic strategy to look for the original submitter + author = super.getRequestItemAuthor(context, item); + // Is the author or his email null, so get the help desk or admin name and email + if (null == author || null == author.getEmail()) { + String email = null; + String name = null; + //First get help desk name and email + email = DSpaceServicesFactory.getInstance() + .getConfigurationService().getProperty("mail.helpdesk"); + name = DSpaceServicesFactory.getInstance() + .getConfigurationService().getProperty("mail.helpdesk.name"); + // If help desk mail is null get the mail and name of admin + if (email == null) { + email = DSpaceServicesFactory.getInstance() + .getConfigurationService().getProperty("mail.admin"); + name = DSpaceServicesFactory.getInstance() + .getConfigurationService().getProperty("mail.admin.name"); + } + author = new RequestItemAuthor(name, email); + } } - return super.getRequestItemAuthor(context, item); + return author; } public void setEmailMetadata(String emailMetadata) { diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemSubmitterStrategy.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemSubmitterStrategy.java index 8ed6238a8c..2708c24ba9 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemSubmitterStrategy.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemSubmitterStrategy.java @@ -23,13 +23,22 @@ public class RequestItemSubmitterStrategy implements RequestItemAuthorExtractor public RequestItemSubmitterStrategy() { } + /** + * Returns the submitter of an Item as RequestItemAuthor or null if the + * Submitter is deleted. + * + * @return The submitter of the item or null if the submitter is deleted + * @throws SQLException if database error + */ @Override public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException { EPerson submitter = item.getSubmitter(); - RequestItemAuthor author = new RequestItemAuthor( - submitter.getFullName(), submitter.getEmail()); + RequestItemAuthor author = null; + if (null != submitter) { + author = new RequestItemAuthor( + submitter.getFullName(), submitter.getEmail()); + } return author; } - } diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/SHERPAResponse.java b/dspace-api/src/main/java/org/dspace/app/sherpa/SHERPAResponse.java index c5b8bbebf3..bd2909c0c1 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/SHERPAResponse.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/SHERPAResponse.java @@ -48,6 +48,9 @@ public class SHERPAResponse { factory.setValidating(false); factory.setIgnoringComments(true); factory.setIgnoringElementContentWhitespace(true); + // disallow DTD parsing to ensure no XXE attacks can occur. 
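The SHERPAResponse hunk here turns off DTD processing entirely, which is the OWASP-recommended defence against XXE. A minimal standalone sketch of a DocumentBuilderFactory hardened the same way (the class name and the two extra entity-related features are illustrative additions, not part of this patch):

```java
import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;

public class HardenedXmlParserSketch {
    public static Document parse(String xml) throws Exception {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        // Refuse any DOCTYPE declaration, so external entities can never be declared at all.
        factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        // Belt and braces: also disable external entity resolution explicitly.
        factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
        factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        DocumentBuilder builder = factory.newDocumentBuilder();
        return builder.parse(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)));
    }
}
```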
+ // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html + factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); DocumentBuilder db = factory.newDocumentBuilder(); Document inDoc = db.parse(xmlData); diff --git a/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java b/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java index bb35cd3ff9..e2743951e7 100644 --- a/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java +++ b/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java @@ -27,6 +27,7 @@ import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.cli.PosixParser; +import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; @@ -84,6 +85,9 @@ public class GenerateSitemaps { options .addOption("p", "ping", true, "ping specified search engine URL"); + options + .addOption("d", "delete", false, + "delete sitemaps dir and its contents"); CommandLine line = null; @@ -105,10 +109,9 @@ public class GenerateSitemaps { } /* - * Sanity check -- if no sitemap generation or pinging to do, print - * usage + * Sanity check -- if no sitemap generation, deletion, or pinging to do, print usage */ - if (line.getArgs().length != 0 || line.hasOption('b') + if (line.getArgs().length != 0 || line.hasOption('d') || line.hasOption('b') && line.hasOption('s') && !line.hasOption('g') && !line.hasOption('m') && !line.hasOption('y') && !line.hasOption('p')) { @@ -123,6 +126,10 @@ public class GenerateSitemaps { generateSitemaps(!line.hasOption('b'), !line.hasOption('s')); } + if (line.hasOption('d')) { + deleteSitemaps(); + } + if (line.hasOption('a')) { pingConfiguredSearchEngines(); } @@ -140,6 +147,29 @@ public class GenerateSitemaps { System.exit(0); } + /** + * Runs generate-sitemaps without any params for the scheduler (task-scheduler.xml). + * + * @throws SQLException if a database error occurs. + * @throws IOException if IO error occurs. + */ + public static void generateSitemapsScheduled() throws IOException, SQLException { + generateSitemaps(true, true); + } + + /** + * Delete the sitemaps directory and its contents if it exists + * @throws IOException if IO error occurs + */ + public static void deleteSitemaps() throws IOException { + File outputDir = new File(configurationService.getProperty("sitemap.dir")); + if (!outputDir.exists() || !outputDir.isDirectory()) { + log.error("Unable to delete sitemaps directory: it doesn't exist or isn't a directory"); + } else { + FileUtils.deleteDirectory(outputDir); + } + } + /** * Generate sitemap.org protocol and/or basic HTML sitemaps. * * @throws IOException if IO error * if IO error occurs.
*/ - public static void generateSitemaps(boolean makeHTMLMap, - boolean makeSitemapOrg) throws SQLException, IOException { - String sitemapStem = configurationService.getProperty("dspace.ui.url") - + "/sitemap"; - String htmlMapStem = configurationService.getProperty("dspace.ui.url") - + "/htmlmap"; - String handleURLStem = configurationService.getProperty("dspace.ui.url") - + "/handle/"; + public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) throws SQLException, IOException { + String uiURLStem = configurationService.getProperty("dspace.ui.url"); + String sitemapStem = uiURLStem + "/sitemap"; File outputDir = new File(configurationService.getProperty("sitemap.dir")); if (!outputDir.exists() && !outputDir.mkdir()) { @@ -168,13 +193,11 @@ public class GenerateSitemaps { AbstractGenerator sitemapsOrg = null; if (makeHTMLMap) { - html = new HTMLSitemapGenerator(outputDir, htmlMapStem + "?map=", - null); + html = new HTMLSitemapGenerator(outputDir, sitemapStem, ".html"); } if (makeSitemapOrg) { - sitemapsOrg = new SitemapsOrgGenerator(outputDir, sitemapStem - + "?map=", null); + sitemapsOrg = new SitemapsOrgGenerator(outputDir, sitemapStem, ".xml"); } Context c = new Context(Context.Mode.READ_ONLY); @@ -182,7 +205,7 @@ public class GenerateSitemaps { List comms = communityService.findAll(c); for (Community comm : comms) { - String url = handleURLStem + comm.getHandle(); + String url = uiURLStem + "/communities/" + comm.getID(); if (makeHTMLMap) { html.addURL(url, null); @@ -197,7 +220,7 @@ public class GenerateSitemaps { List colls = collectionService.findAll(c); for (Collection coll : colls) { - String url = handleURLStem + coll.getHandle(); + String url = uiURLStem + "/collections/" + coll.getID(); if (makeHTMLMap) { html.addURL(url, null); @@ -214,7 +237,7 @@ public class GenerateSitemaps { while (allItems.hasNext()) { Item i = allItems.next(); - String url = handleURLStem + i.getHandle(); + String url = uiURLStem + "/items/" + i.getID(); Date lastMod = i.getLastModified(); if (makeHTMLMap) { diff --git a/dspace-api/src/main/java/org/dspace/app/sitemap/SitemapsOrgGenerator.java b/dspace-api/src/main/java/org/dspace/app/sitemap/SitemapsOrgGenerator.java index 9a0d5a6ba4..3ec4ca8239 100644 --- a/dspace-api/src/main/java/org/dspace/app/sitemap/SitemapsOrgGenerator.java +++ b/dspace-api/src/main/java/org/dspace/app/sitemap/SitemapsOrgGenerator.java @@ -59,7 +59,7 @@ public class SitemapsOrgGenerator extends AbstractGenerator { @Override public String getFilename(int number) { - return "sitemap" + number + ".xml.gz"; + return "sitemap" + number + ".xml"; } @Override @@ -100,12 +100,12 @@ public class SitemapsOrgGenerator extends AbstractGenerator { @Override public boolean useCompression() { - return true; + return false; } @Override public String getIndexFilename() { - return "sitemap_index.xml.gz"; + return "sitemap_index.xml"; } @Override diff --git a/dspace-api/src/main/java/org/dspace/app/util/AuthorizeUtil.java b/dspace-api/src/main/java/org/dspace/app/util/AuthorizeUtil.java index 6c4271e1f2..efd813d29b 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/AuthorizeUtil.java +++ b/dspace-api/src/main/java/org/dspace/app/util/AuthorizeUtil.java @@ -9,7 +9,10 @@ package org.dspace.app.util; import java.sql.SQLException; import java.util.List; +import javax.servlet.http.HttpServletRequest; +import org.apache.logging.log4j.Logger; +import org.dspace.authenticate.factory.AuthenticateServiceFactory; import org.dspace.authorize.AuthorizeConfiguration; import 
org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; @@ -19,12 +22,22 @@ import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import org.dspace.content.Collection; import org.dspace.content.Community; +import org.dspace.content.DSpaceObject; import org.dspace.content.Item; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.GroupService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.utils.DSpace; +import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory; +import org.dspace.xmlworkflow.storedcomponents.CollectionRole; +import org.dspace.xmlworkflow.storedcomponents.service.CollectionRoleService; /** * This class is an addition to the AuthorizeManager that perform authorization @@ -34,6 +47,7 @@ import org.dspace.core.Context; */ public class AuthorizeUtil { + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorizeUtil.class); /** * Default constructor */ @@ -525,4 +539,154 @@ public class AuthorizeUtil { } } } + + /** + * This method will check whether the current user is authorized to manage the default read group + * @param context The relevant DSpace context + * @param collection The collection for which this will be checked + * @throws AuthorizeException If something goes wrong + * @throws SQLException If something goes wrong + */ + public static void authorizeManageDefaultReadGroup(Context context, + Collection collection) throws AuthorizeException, SQLException { + AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); + authorizeService.authorizeAction(context, collection, Constants.ADMIN); + } + + /** + * This method checks whether the current user has sufficient rights to modify the group. + * Depending on the kind of group and due to delegated administration, separate checks need to be done to verify + * whether the user is allowed to modify the group. 
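A hedged sketch of how a caller outside this diff might use the authorizeManageGroup helper being added here (the wrapper class and method below are hypothetical, only the AuthorizeUtil call itself comes from this patch):

```java
import java.sql.SQLException;
import org.dspace.app.util.AuthorizeUtil;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context;
import org.dspace.eperson.Group;

public class GroupEditGuardSketch {
    /**
     * Returns true if the current user may manage the given group, relying on the
     * delegated-administration checks that authorizeManageGroup performs.
     */
    public static boolean canManage(Context context, Group group) throws SQLException {
        try {
            AuthorizeUtil.authorizeManageGroup(context, group);
            return true;
        } catch (AuthorizeException e) {
            // Not a site admin and not an admin of the owning collection/community.
            return false;
        }
    }
}
```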
+ * + * @param context the context in which the user will be checked + * @param group the group to be checked + * @throws SQLException if database error + * @throws AuthorizeException if the current user is not allowed to manage the group + */ + public static void authorizeManageGroup(Context context, Group group) throws SQLException, AuthorizeException { + AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); + GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + CollectionRoleService collectionRoleService = XmlWorkflowServiceFactory.getInstance() + .getCollectionRoleService(); + if (authorizeService.isAdmin(context)) { + return; + } + + DSpaceObject parentObject = groupService.getParentObject(context, group); + if (parentObject == null) { + throw new AuthorizeException("not authorized to manage this group"); + } + if (parentObject.getType() == Constants.COLLECTION) { + Collection collection = (Collection) parentObject; + + if (group.equals(collection.getSubmitters())) { + authorizeManageSubmittersGroup(context, collection); + return; + } + + + List collectionRoles = collectionRoleService.findByCollection(context, collection); + for (CollectionRole role : collectionRoles) { + if (group.equals(role.getGroup())) { + authorizeManageWorkflowsGroup(context, collection); + return; + } + } + + if (group.equals(collection.getAdministrators())) { + authorizeManageAdminGroup(context, collection); + return; + } + // if we reach this point, it means that the group is related + // to a collection, but as it is neither the submitters group, nor the administrators group, + // nor one of the workflow groups, it must be a default item/bitstream read group + authorizeManageDefaultReadGroup(context, collection); + return; + } + if (parentObject.getType() == Constants.COMMUNITY) { + Community community = (Community) parentObject; + authorizeManageAdminGroup(context, community); + return; + } + + throw new AuthorizeException("not authorized to manage this group"); + } + + /** + * This method will return a boolean indicating whether the current user is allowed to register a new + * account or not + * @param context The relevant DSpace context + * @param request The current request + * @return A boolean indicating whether the current user can register a new account or not + * @throws SQLException If something goes wrong + */ + public static boolean authorizeNewAccountRegistration(Context context, HttpServletRequest request) + throws SQLException { + if (DSpaceServicesFactory.getInstance().getConfigurationService() + .getBooleanProperty("user.registration", true)) { + // This allowSetPassword is currently the only method that would return true only when it's + // actually expected to be returning true.
+ // For example the LDAP canSelfRegister will return true due to auto-register, while that + // does not imply a new user can register explicitly + return AuthenticateServiceFactory.getInstance().getAuthenticationService() + .allowSetPassword(context, request, null); + } + return false; + } + + /** + * This method will return a boolean indicating whether it's allowed to update the password for the EPerson + * with the given email and canLogin property + * @param context The relevant DSpace context + * @param email The email to be checked + * @return A boolean indicating if the password can be updated or not + */ + public static boolean authorizeUpdatePassword(Context context, String email) { + try { + EPerson eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, email); + if (eperson != null && eperson.canLogIn()) { + HttpServletRequest request = new DSpace().getRequestService().getCurrentRequest() + .getHttpServletRequest(); + return AuthenticateServiceFactory.getInstance().getAuthenticationService() + .allowSetPassword(context, request, null); + } + } catch (SQLException e) { + log.error("Something went wrong trying to retrieve EPerson for email: " + email, e); + } + return false; + } + + /** + * This method checks if the community Admin can manage accounts + * + * @return true if is able + */ + public static boolean canCommunityAdminManageAccounts() { + boolean isAble = false; + if (AuthorizeConfiguration.canCommunityAdminManagePolicies() + || AuthorizeConfiguration.canCommunityAdminManageAdminGroup() + || AuthorizeConfiguration.canCommunityAdminManageCollectionPolicies() + || AuthorizeConfiguration.canCommunityAdminManageCollectionSubmitters() + || AuthorizeConfiguration.canCommunityAdminManageCollectionWorkflows() + || AuthorizeConfiguration.canCommunityAdminManageCollectionAdminGroup()) { + isAble = true; + } + return isAble; + } + + /** + * This method checks if the Collection Admin can manage accounts + * + * @return true if is able + */ + public static boolean canCollectionAdminManageAccounts() { + boolean isAble = false; + if (AuthorizeConfiguration.canCollectionAdminManagePolicies() + || AuthorizeConfiguration.canCollectionAdminManageSubmitters() + || AuthorizeConfiguration.canCollectionAdminManageWorkflows() + || AuthorizeConfiguration.canCollectionAdminManageAdminGroup()) { + isAble = true; + } + return isAble; + } } diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInput.java b/dspace-api/src/main/java/org/dspace/app/util/DCInput.java index a6444a3890..c3cbac115a 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInput.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInput.java @@ -12,6 +12,7 @@ import java.util.List; import java.util.Map; import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; +import javax.annotation.Nullable; import org.apache.commons.lang3.StringUtils; import org.dspace.content.MetadataSchemaEnum; @@ -291,7 +292,7 @@ public class DCInput { * * @return the input type */ - public String getInputType() { + public @Nullable String getInputType() { return inputType; } diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java b/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java index faa3fb7190..bfd4270cf2 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java @@ -10,6 +10,8 @@ package org.dspace.app.util; import java.util.List; import java.util.Map; +import 
org.apache.commons.lang3.StringUtils; +import org.dspace.core.Utils; /** * Class representing all DC inputs required for a submission, organized into pages * @@ -107,9 +109,21 @@ public class DCInputSet { for (int i = 0; i < inputs.length; i++) { for (int j = 0; j < inputs[i].length; j++) { DCInput field = inputs[i][j]; - String fullName = field.getFieldName(); - if (fullName.equals(fieldName)) { - return true; + // If this is a "qualdrop_value" field, then the full field name is the field + dropdown qualifier + if (StringUtils.equals(field.getInputType(), "qualdrop_value")) { + List pairs = field.getPairs(); + for (int k = 0; k < pairs.size(); k += 2) { + String qualifier = pairs.get(k + 1); + String fullName = Utils.standardize(field.getSchema(), field.getElement(), qualifier, "."); + if (fullName.equals(fieldName)) { + return true; + } + } + } else { + String fullName = field.getFieldName(); + if (fullName.equals(fieldName)) { + return true; + } } } } diff --git a/dspace-api/src/main/java/org/dspace/app/util/IndexVersion.java b/dspace-api/src/main/java/org/dspace/app/util/IndexVersion.java index d8b2d6868a..7bdaa95b5c 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/IndexVersion.java +++ b/dspace-api/src/main/java/org/dspace/app/util/IndexVersion.java @@ -250,12 +250,8 @@ public class IndexVersion { } else if (firstMinor > secondMinor) { // If we get here, major versions must be EQUAL. Now, time to check our minor versions return GREATER_THAN; - } else if (firstMinor < secondMinor) { - return LESS_THAN; } else { - // This is an impossible scenario. - // This 'else' should never be triggered since we've checked for equality above already - return EQUAL; + return LESS_THAN; } } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/IPMatcher.java b/dspace-api/src/main/java/org/dspace/authenticate/IPMatcher.java index 955b6c86d3..439e53af1d 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/IPMatcher.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/IPMatcher.java @@ -87,13 +87,16 @@ public class IPMatcher { + ipSpec); } - int maskBytes = maskBits / 8; - for (int i = 0; i < maskBytes; i++) { - netmask[i] = (byte) 0Xff; - } - netmask[maskBytes] = (byte) ((byte) 0Xff << 8 - (maskBits % 8)); // FIXME test! 
- for (int i = maskBytes + 1; i < (128 / 8); i++) { - netmask[i] = 0; + for (int i = 0; i < netmask.length; i++) { + if (maskBits <= 0) { + netmask[i] = 0; + } else if (maskBits > 8) { + netmask[i] = (byte) 0Xff; + } else { + netmask[i] = (byte) ((byte) 0Xff << 8 - maskBits); + } + + maskBits = maskBits - 8; } break; case 1: // No explicit mask: fill the mask with 1s diff --git a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java index 2384a260da..eb7d60d84c 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java @@ -430,7 +430,11 @@ public class AuthorizeServiceImpl implements AuthorizeService { public boolean isCommunityAdmin(Context c) throws SQLException { EPerson e = c.getCurrentUser(); + return isCommunityAdmin(c, e); + } + @Override + public boolean isCommunityAdmin(Context c, EPerson e) throws SQLException { if (e != null) { List policies = resourcePolicyService.find(c, e, groupService.allMemberGroups(c, e), @@ -446,7 +450,11 @@ public class AuthorizeServiceImpl implements AuthorizeService { public boolean isCollectionAdmin(Context c) throws SQLException { EPerson e = c.getCurrentUser(); + return isCollectionAdmin(c, e); + } + @Override + public boolean isCollectionAdmin(Context c, EPerson e) throws SQLException { if (e != null) { List policies = resourcePolicyService.find(c, e, groupService.allMemberGroups(c, e), @@ -606,6 +614,12 @@ public class AuthorizeServiceImpl implements AuthorizeService { resourcePolicyService.removeDsoEPersonPolicies(c, o, e); } + @Override + public void removeAllEPersonPolicies(Context c, EPerson e) + throws SQLException, AuthorizeException { + resourcePolicyService.removeAllEPersonPolicies(c, e); + } + @Override public List getAuthorizedGroups(Context c, DSpaceObject o, int actionID) throws java.sql.SQLException { diff --git a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicyServiceImpl.java b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicyServiceImpl.java index 74b3c0633f..4a2addf781 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicyServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicyServiceImpl.java @@ -114,6 +114,11 @@ public class ResourcePolicyServiceImpl implements ResourcePolicyService { return resourcePolicyDAO.findByEPersonGroupTypeIdAction(c, e, groups, action, type_id); } + @Override + public List find(Context context, EPerson ePerson) throws SQLException { + return resourcePolicyDAO.findByEPerson(context, ePerson); + } + @Override public List findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group, int action, int notPolicyID) @@ -246,6 +251,11 @@ public class ResourcePolicyServiceImpl implements ResourcePolicyService { } + @Override + public void removeAllEPersonPolicies(Context context, EPerson ePerson) throws SQLException, AuthorizeException { + resourcePolicyDAO.deleteByEPerson(context, ePerson); + } + @Override public void removeGroupPolicies(Context c, Group group) throws SQLException { resourcePolicyDAO.deleteByGroup(c, group); diff --git a/dspace-api/src/main/java/org/dspace/authorize/dao/ResourcePolicyDAO.java b/dspace-api/src/main/java/org/dspace/authorize/dao/ResourcePolicyDAO.java index fa3b38efc8..5c898a5bca 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/dao/ResourcePolicyDAO.java +++ 
b/dspace-api/src/main/java/org/dspace/authorize/dao/ResourcePolicyDAO.java @@ -33,6 +33,8 @@ public interface ResourcePolicyDAO extends GenericDAO { public List findByDsoAndType(Context context, DSpaceObject dSpaceObject, String type) throws SQLException; + public List findByEPerson(Context context, EPerson ePerson) throws SQLException; + public List findByGroup(Context context, Group group) throws SQLException; public List findByDSoAndAction(Context context, DSpaceObject dso, int actionId) throws SQLException; @@ -66,6 +68,15 @@ public interface ResourcePolicyDAO extends GenericDAO { public void deleteByDsoEPersonPolicies(Context context, DSpaceObject dso, EPerson ePerson) throws SQLException; + /** + * Deletes all policies that belong to an EPerson + * + * @param context DSpace context object + * @param ePerson ePerson whose policies to delete + * @throws SQLException if database error + */ + public void deleteByEPerson(Context context, EPerson ePerson) throws SQLException; + public void deleteByDsoAndTypeNotEqualsTo(Context c, DSpaceObject o, String type) throws SQLException; /** @@ -101,7 +112,7 @@ public interface ResourcePolicyDAO extends GenericDAO { * @return total resource policies of the ePerson * @throws SQLException if database error */ - public int countByEPerson(Context context, EPerson eperson) throws SQLException; + public int countByEPerson(Context context, EPerson ePerson) throws SQLException; /** * Return a paginated list of policies related to a resourceUuid belong to an ePerson diff --git a/dspace-api/src/main/java/org/dspace/authorize/dao/impl/ResourcePolicyDAOImpl.java b/dspace-api/src/main/java/org/dspace/authorize/dao/impl/ResourcePolicyDAOImpl.java index 6aa5d2bb2e..9dd368d667 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/dao/impl/ResourcePolicyDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/dao/impl/ResourcePolicyDAOImpl.java @@ -63,6 +63,16 @@ public class ResourcePolicyDAOImpl extends AbstractHibernateDAO return list(context, criteriaQuery, false, ResourcePolicy.class, -1, -1); } + @Override + public List findByEPerson(Context context, EPerson ePerson) throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, ResourcePolicy.class); + Root resourcePolicyRoot = criteriaQuery.from(ResourcePolicy.class); + criteriaQuery.select(resourcePolicyRoot); + criteriaQuery.where(criteriaBuilder.equal(resourcePolicyRoot.get(ResourcePolicy_.eperson), ePerson)); + return list(context, criteriaQuery, false, ResourcePolicy.class, -1, -1); + } + @Override public List findByGroup(Context context, Group group) throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); @@ -194,6 +204,15 @@ public class ResourcePolicyDAOImpl extends AbstractHibernateDAO } + @Override + public void deleteByEPerson(Context context, EPerson ePerson) throws SQLException { + String queryString = "delete from ResourcePolicy where eperson= :eperson"; + Query query = createQuery(context, queryString); + query.setParameter("eperson", ePerson); + query.executeUpdate(); + + } + @Override public void deleteByDsoAndTypeNotEqualsTo(Context context, DSpaceObject dso, String type) throws SQLException { @@ -247,10 +266,10 @@ public class ResourcePolicyDAOImpl extends AbstractHibernateDAO } @Override - public int countByEPerson(Context context, EPerson eperson) throws SQLException { + public int countByEPerson(Context context, EPerson ePerson) throws SQLException { Query 
query = createQuery(context, "SELECT count(*) FROM " + ResourcePolicy.class.getSimpleName() + " WHERE eperson_id = (:epersonUuid) "); - query.setParameter("epersonUuid", eperson.getID()); + query.setParameter("epersonUuid", ePerson.getID()); return count(query); } diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java index 9e739e2585..94a1c0297e 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java +++ b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java @@ -213,6 +213,26 @@ public interface AuthorizeService { public boolean isCollectionAdmin(Context c) throws SQLException; + /** + * Check to see if a specific user is a Community admin + * + * @param c current context + * @param e the user to check + * @return true if user is an admin of some community + * @throws SQLException if database error + */ + public boolean isCommunityAdmin(Context c, EPerson e) throws SQLException; + + /** + * Check to see if a specific user is a Collection admin + * + * @param c current context + * @param e the user to check + * @return true if user is an admin of some collection + * @throws SQLException if database error + */ + public boolean isCollectionAdmin(Context c, EPerson e) throws SQLException; + /////////////////////////////////////////////// // policy manipulation methods /////////////////////////////////////////////// @@ -429,6 +449,16 @@ public interface AuthorizeService { */ public void removeEPersonPolicies(Context c, DSpaceObject o, EPerson e) throws SQLException, AuthorizeException; + /** + * Removes all policies that belong to an EPerson. + * + * @param c current context + * @param e the eperson + * @throws SQLException if there's a database problem + * @throws AuthorizeException if authorization error + */ + public void removeAllEPersonPolicies(Context c, EPerson e) throws SQLException, AuthorizeException; + /** * Returns all groups authorized to perform an action on an object. Returns * empty array if no matches.
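The three AuthorizeService additions above (isCommunityAdmin and isCollectionAdmin for an arbitrary EPerson, plus removeAllEPersonPolicies) are the kind of calls an account-removal flow would combine; a hedged sketch under that assumption (the cleanup class itself is hypothetical, only the service methods come from this patch):

```java
import java.sql.SQLException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;

public class EPersonPolicyCleanupSketch {
    private final AuthorizeService authorizeService =
            AuthorizeServiceFactory.getInstance().getAuthorizeService();

    /**
     * Before removing an EPerson, drop every ResourcePolicy that still points at it.
     * isCommunityAdmin/isCollectionAdmin can now be asked about an arbitrary user,
     * not only the current one, which is useful for pre-delete validation.
     */
    public void cleanUp(Context context, EPerson ePerson) throws SQLException, AuthorizeException {
        if (authorizeService.isCommunityAdmin(context, ePerson)
                || authorizeService.isCollectionAdmin(context, ePerson)) {
            // Caller may want to warn or reassign duties before deleting such an account.
        }
        authorizeService.removeAllEPersonPolicies(context, ePerson);
    }
}
```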
diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java b/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java index 48ec510c86..f1d8b30242 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java +++ b/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java @@ -39,6 +39,16 @@ public interface ResourcePolicyService extends DSpaceCRUDService public List find(Context context, Group group) throws SQLException; + /** + * Retrieve a list of ResourcePolicies by EPerson + * + * @param c context + * @param ePerson the EPerson for which to look up the resource policies + * @return a list of ResourcePolicies for the provided EPerson + * @throws SQLException if there's a database problem + */ + public List find(Context c, EPerson ePerson) throws SQLException; + public List find(Context c, EPerson e, List groups, int action, int type_id) throws SQLException; @@ -72,6 +82,16 @@ public interface ResourcePolicyService extends DSpaceCRUDService public void removeDsoEPersonPolicies(Context context, DSpaceObject dso, EPerson ePerson) throws SQLException, AuthorizeException; + /** + * Removes all ResourcePolicies related to an EPerson + * + * @param context context + * @param ePerson the EPerson for which the ResourcePolicies will be deleted + * @throws SQLException if there's a database problem + * @throws AuthorizeException when the current user is not authorized + */ + public void removeAllEPersonPolicies(Context context, EPerson ePerson) throws SQLException, AuthorizeException; + public void removeGroupPolicies(Context c, Group group) throws SQLException; public void removeDsoAndTypeNotEqualsToPolicies(Context c, DSpaceObject o, String type) diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamFormatServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BitstreamFormatServiceImpl.java index 21d1fa4ba4..89bf74ece6 100644 --- a/dspace-api/src/main/java/org/dspace/content/BitstreamFormatServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BitstreamFormatServiceImpl.java @@ -153,7 +153,7 @@ public class BitstreamFormatServiceImpl implements BitstreamFormatService { // If the exception was thrown, unknown will == null so goahead and // load s. If not, check that the unknown's registry's name is not // being reset. 
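The BitstreamFormatServiceImpl change just below replaces == on the boxed Integer IDs with equals(); outside the small Integer cache, reference comparison can report two equal IDs as different. A tiny standalone illustration (not DSpace code):

```java
public class BoxedIdComparisonSketch {
    public static void main(String[] args) {
        Integer cachedA = 100, cachedB = 100;       // within the Integer cache (-128..127)
        Integer largeA = 1000, largeB = 1000;       // outside the cache: distinct objects

        System.out.println(cachedA == cachedB);     // true, but only by accident of caching
        System.out.println(largeA == largeB);       // false, even though the values match
        System.out.println(largeA.equals(largeB));  // true: compares values, not references
    }
}
```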
- if (unknown == null || unknown.getID() != bitstreamFormat.getID()) { + if (unknown == null || !unknown.getID().equals(bitstreamFormat.getID())) { bitstreamFormat.setShortDescriptionInternal(shortDescription); } } @@ -208,7 +208,7 @@ public class BitstreamFormatServiceImpl implements BitstreamFormatService { // Find "unknown" type BitstreamFormat unknown = findUnknown(context); - if (unknown.getID() == bitstreamFormat.getID()) { + if (unknown.getID().equals(bitstreamFormat.getID())) { throw new IllegalArgumentException("The Unknown bitstream format may not be deleted."); } @@ -270,4 +270,4 @@ public class BitstreamFormatServiceImpl implements BitstreamFormatService { } return null; } -} \ No newline at end of file +} diff --git a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java index cc6d32b8c3..559b95edb8 100644 --- a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java @@ -17,11 +17,13 @@ import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.MissingResourceException; +import java.util.Set; import java.util.UUID; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; +import org.apache.solr.client.solrj.util.ClientUtils; import org.dspace.app.util.AuthorizeUtil; import org.dspace.authorize.AuthorizeConfiguration; import org.dspace.authorize.AuthorizeException; @@ -40,6 +42,13 @@ import org.dspace.core.Context; import org.dspace.core.I18nUtil; import org.dspace.core.LogManager; import org.dspace.core.service.LicenseService; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.DiscoverResult; +import org.dspace.discovery.IndexableObject; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.discovery.indexobject.IndexableCollection; +import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.SubscribeService; @@ -48,7 +57,6 @@ import org.dspace.harvest.HarvestedCollection; import org.dspace.harvest.service.HarvestedCollectionService; import org.dspace.workflow.factory.WorkflowServiceFactory; import org.dspace.xmlworkflow.WorkflowConfigurationException; -import org.dspace.xmlworkflow.XmlWorkflowFactoryImpl; import org.dspace.xmlworkflow.factory.XmlWorkflowFactory; import org.dspace.xmlworkflow.state.Workflow; import org.dspace.xmlworkflow.storedcomponents.CollectionRole; @@ -100,6 +108,9 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl i @Autowired(required = true) protected CollectionRoleService collectionRoleService; + @Autowired(required = true) + protected SearchService searchService; + protected CollectionServiceImpl() { super(); } @@ -375,7 +386,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl i log.error(LogManager.getHeader(context, "setWorkflowGroup", "collection_id=" + collection.getID() + " " + e.getMessage()), e); } - if (!StringUtils.equals(XmlWorkflowFactoryImpl.LEGACY_WORKFLOW_NAME, workflow.getID())) { + if (!StringUtils.equals(workflowFactory.getDefaultWorkflow().getID(), workflow.getID())) { throw new IllegalArgumentException( "setWorkflowGroup can be used only on collection with the default basic dspace workflow. 
" + "Instead, the collection: " @@ -889,4 +900,95 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl i throws SQLException { return collectionDAO.getCollectionsWithBitstreamSizesTotal(context); } + + @Override + public Group createDefaultReadGroup(Context context, Collection collection, String typeOfGroupString, + int defaultRead) + throws SQLException, AuthorizeException { + Group role = groupService.create(context); + groupService.setName(role, "COLLECTION_" + collection.getID().toString() + "_" + typeOfGroupString + + "_DEFAULT_READ"); + + // Remove existing privileges from the anonymous group. + authorizeService.removePoliciesActionFilter(context, collection, defaultRead); + + // Grant our new role the default privileges. + authorizeService.addPolicy(context, collection, defaultRead, role); + groupService.update(context, role); + return role; + } + + @Override + public List findCollectionsWithSubmit(String q, Context context, Community community, + int offset, int limit) throws SQLException, SearchServiceException { + + List collections = new ArrayList(); + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE); + discoverQuery.setStart(offset); + discoverQuery.setMaxResults(limit); + DiscoverResult resp = retrieveCollectionsWithSubmit(context, discoverQuery,community, q); + for (IndexableObject solrCollections : resp.getIndexableObjects()) { + Collection c = ((IndexableCollection) solrCollections).getIndexedObject(); + collections.add(c); + } + return collections; + } + + @Override + public int countCollectionsWithSubmit(String q, Context context, Community community) + throws SQLException, SearchServiceException { + + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setMaxResults(0); + discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE); + DiscoverResult resp = retrieveCollectionsWithSubmit(context, discoverQuery,community,q); + return (int)resp.getTotalSearchResults(); + } + + /** + * Finds all Indexed Collections where the current user has submit rights. If the user is an Admin, + * this is all Indexed Collections. Otherwise, it includes those collections where + * an indexed "submit" policy lists either the eperson or one of the eperson's groups + * + * @param context DSpace context + * @param discoverQuery + * @param community parent community, could be null + * @param q limit the returned collection to those with metadata values matching the query + * terms. 
The terms are used to make also a prefix query on SOLR + * so it can be used to implement an autosuggest feature over the collection name + * @return discovery search result objects + * @throws SQLException if something goes wrong + * @throws SearchServiceException if search error + */ + private DiscoverResult retrieveCollectionsWithSubmit(Context context, DiscoverQuery discoverQuery, + Community community, String q) throws SQLException, SearchServiceException { + + StringBuilder query = new StringBuilder(); + EPerson currentUser = context.getCurrentUser(); + if (!authorizeService.isAdmin(context)) { + String userId = ""; + if (currentUser != null) { + userId = currentUser.getID().toString(); + } + query.append("submit:(e").append(userId); + Set groups = groupService.allMemberGroupsSet(context, currentUser); + for (Group group : groups) { + query.append(" OR g").append(group.getID()); + } + query.append(")"); + discoverQuery.addFilterQueries(query.toString()); + } + if (community != null) { + discoverQuery.addFilterQueries("location.comm:" + community.getID().toString()); + } + if (StringUtils.isNotBlank(q)) { + StringBuilder buildQuery = new StringBuilder(); + String escapedQuery = ClientUtils.escapeQueryChars(q); + buildQuery.append(escapedQuery).append(" OR ").append(escapedQuery).append("*"); + discoverQuery.setQuery(buildQuery.toString()); + } + DiscoverResult resp = searchService.search(context, discoverQuery); + return resp; + } } diff --git a/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java index c49442267a..2ad0c8c2bc 100644 --- a/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java @@ -629,6 +629,10 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl imp case Constants.DELETE: if (AuthorizeConfiguration.canCommunityAdminPerformSubelementDeletion()) { adminObject = getParentObject(context, community); + if (adminObject == null) { + //top-level community, has to be admin of the current community + adminObject = community; + } } break; case Constants.ADD: diff --git a/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java index 6886d41e1b..d33ad7e416 100644 --- a/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java @@ -207,8 +207,8 @@ public abstract class DSpaceObjectServiceImpl implements } @Override - public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang, - List values) throws SQLException { + public List addMetadata(Context context, T dso, String schema, String element, String qualifier, + String lang, List values) throws SQLException { MetadataField metadataField = metadataFieldService.findByElement(context, schema, element, qualifier); if (metadataField == null) { throw new SQLException( @@ -216,12 +216,12 @@ public abstract class DSpaceObjectServiceImpl implements "exist!"); } - addMetadata(context, dso, metadataField, lang, values); + return addMetadata(context, dso, metadataField, lang, values); } @Override - public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang, - List values, List authorities, List confidences) + public List addMetadata(Context context, T dso, String schema, 
String element, String qualifier, + String lang, List values, List authorities, List confidences) throws SQLException { // We will not verify that they are valid entries in the registry // until update() is called. @@ -231,15 +231,16 @@ public abstract class DSpaceObjectServiceImpl implements "bad_dublin_core schema=" + schema + "." + element + "." + qualifier + ". Metadata field does not " + "exist!"); } - addMetadata(context, dso, metadataField, lang, values, authorities, confidences); + return addMetadata(context, dso, metadataField, lang, values, authorities, confidences); } @Override - public void addMetadata(Context context, T dso, MetadataField metadataField, String lang, List values, - List authorities, List confidences) + public List addMetadata(Context context, T dso, MetadataField metadataField, String lang, + List values, List authorities, List confidences) throws SQLException { boolean authorityControlled = metadataAuthorityService.isAuthorityControlled(metadataField); boolean authorityRequired = metadataAuthorityService.isAuthorityRequired(metadataField); + List newMetadata = new ArrayList<>(values.size()); // We will not verify that they are valid entries in the registry // until update() is called. for (int i = 0; i < values.size(); i++) { @@ -250,6 +251,7 @@ public abstract class DSpaceObjectServiceImpl implements } } MetadataValue metadataValue = metadataValueService.create(context, dso, metadataField); + newMetadata.add(metadataValue); //Set place to list length of all metadatavalues for the given schema.element.qualifier combination. // Subtract one to adhere to the 0 as first element rule metadataValue.setPlace( @@ -304,29 +306,31 @@ public abstract class DSpaceObjectServiceImpl implements // metadataValueService.update(context, metadataValue); dso.addDetails(metadataField.toString()); } + return newMetadata; } @Override - public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value, - String authority, int confidence) throws SQLException { - addMetadata(context, dso, metadataField, language, Arrays.asList(value), Arrays.asList(authority), - Arrays.asList(confidence)); + public MetadataValue addMetadata(Context context, T dso, MetadataField metadataField, String language, + String value, String authority, int confidence) throws SQLException { + return addMetadata(context, dso, metadataField, language, Arrays.asList(value), Arrays.asList(authority), + Arrays.asList(confidence)).get(0); } @Override - public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang, - String value) throws SQLException { - addMetadata(context, dso, schema, element, qualifier, lang, Arrays.asList(value)); + public MetadataValue addMetadata(Context context, T dso, String schema, String element, String qualifier, + String lang, String value) throws SQLException { + return addMetadata(context, dso, schema, element, qualifier, lang, Arrays.asList(value)).get(0); } @Override - public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value) + public MetadataValue addMetadata(Context context, T dso, MetadataField metadataField, String language, String value) throws SQLException { - addMetadata(context, dso, metadataField, language, Arrays.asList(value)); + return addMetadata(context, dso, metadataField, language, Arrays.asList(value)).get(0); } @Override - public void addMetadata(Context context, T dso, MetadataField metadataField, String language, List values) + 
public List addMetadata(Context context, T dso, MetadataField metadataField, String language, + List values) throws SQLException { if (metadataField != null) { String fieldKey = metadataAuthorityService @@ -343,18 +347,19 @@ public abstract class DSpaceObjectServiceImpl implements getAuthoritiesAndConfidences(fieldKey, null, values, authorities, confidences, i); } } - addMetadata(context, dso, metadataField, language, values, authorities, confidences); + return addMetadata(context, dso, metadataField, language, values, authorities, confidences); } else { - addMetadata(context, dso, metadataField, language, values, null, null); + return addMetadata(context, dso, metadataField, language, values, null, null); } } + return new ArrayList<>(0); } @Override - public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang, - String value, String authority, int confidence) throws SQLException { - addMetadata(context, dso, schema, element, qualifier, lang, Arrays.asList(value), Arrays.asList(authority), - Arrays.asList(confidence)); + public MetadataValue addMetadata(Context context, T dso, String schema, String element, String qualifier, + String lang, String value, String authority, int confidence) throws SQLException { + return addMetadata(context, dso, schema, element, qualifier, lang, Arrays.asList(value), + Arrays.asList(authority), Arrays.asList(confidence)).get(0); } @Override @@ -660,33 +665,35 @@ public abstract class DSpaceObjectServiceImpl implements @Override public void addAndShiftRightMetadata(Context context, T dso, String schema, String element, String qualifier, String lang, String value, String authority, int confidence, int index) - throws SQLException { + throws SQLException { List list = getMetadata(dso, schema, element, qualifier); - clearMetadata(context, dso, schema, element, qualifier, Item.ANY); - int idx = 0; + int place = 0; boolean last = true; for (MetadataValue rr : list) { if (idx == index) { - addMetadata(context, dso, schema, element, qualifier, - lang, value, authority, confidence); + MetadataValue newMetadata = addMetadata(context, dso, schema, element, qualifier, + lang, value, authority, confidence); + + moveSingleMetadataValue(context, dso, place, newMetadata); + place++; last = false; } - addMetadata(context, dso, schema, element, qualifier, - rr.getLanguage(), rr.getValue(), rr.getAuthority(), rr.getConfidence()); + moveSingleMetadataValue(context, dso, place, rr); + place++; idx++; } if (last) { addMetadata(context, dso, schema, element, qualifier, - lang, value, authority, confidence); + lang, value, authority, confidence); } } @Override public void moveMetadata(Context context, T dso, String schema, String element, String qualifier, int from, int to) - throws SQLException, IllegalArgumentException { + throws SQLException, IllegalArgumentException { if (from == to) { throw new IllegalArgumentException("The \"from\" location MUST be different from \"to\" location"); @@ -701,8 +708,6 @@ public abstract class DSpaceObjectServiceImpl implements "\n Idx from:" + from + " Idx to: " + to); } - clearMetadata(context, dso, schema, element, qualifier, Item.ANY); - int idx = 0; MetadataValue moved = null; for (MetadataValue md : list) { @@ -714,49 +719,46 @@ public abstract class DSpaceObjectServiceImpl implements } idx = 0; + int place = 0; boolean last = true; for (MetadataValue rr : list) { if (idx == to && to < from) { - addMetadata(context, dso, schema, element, qualifier, moved.getLanguage(), moved.getValue(), - 
moved.getAuthority(), moved.getConfidence()); + moveSingleMetadataValue(context, dso, place, moved); + place++; last = false; } if (idx != from) { - addMetadata(context, dso, schema, element, qualifier, rr.getLanguage(), rr.getValue(), - rr.getAuthority(), rr.getConfidence()); + moveSingleMetadataValue(context, dso, place, rr); + place++; } if (idx == to && to > from) { - addMetadata(context, dso, schema, element, qualifier, moved.getLanguage(), moved.getValue(), - moved.getAuthority(), moved.getConfidence()); + moveSingleMetadataValue(context, dso, place, moved); + place++; last = false; } idx++; } if (last) { - addMetadata(context, dso, schema, element, qualifier, moved.getLanguage(), moved.getValue(), - moved.getAuthority(), moved.getConfidence()); + moveSingleMetadataValue(context, dso, place, moved); } } + /** + * Supports moving metadata by updating the place of the metadata value + */ + protected void moveSingleMetadataValue(Context context, T dso, int place, MetadataValue rr) { + //just move the metadata + rr.setPlace(place); + } + @Override public void replaceMetadata(Context context, T dso, String schema, String element, String qualifier, String lang, String value, String authority, int confidence, int index) throws SQLException { List list = getMetadata(dso, schema, element, qualifier); - clearMetadata(context, dso, schema, element, qualifier, Item.ANY); - - int idx = 0; - for (MetadataValue rr : list) { - if (idx == index) { - addMetadata(context, dso, schema, element, qualifier, - lang, value, authority, confidence); - } else { - addMetadata(context, dso, schema, element, qualifier, - rr.getLanguage(), rr.getValue(), rr.getAuthority(), rr.getConfidence()); - } - idx++; - } + removeMetadataValues(context, dso, Arrays.asList(list.get(index))); + addAndShiftRightMetadata(context, dso, schema, element, qualifier, lang, value, authority, confidence, index); } @Override diff --git a/dspace-api/src/main/java/org/dspace/content/EntityType.java b/dspace-api/src/main/java/org/dspace/content/EntityType.java index 15fe1739e5..d44ec5a35d 100644 --- a/dspace-api/src/main/java/org/dspace/content/EntityType.java +++ b/dspace-api/src/main/java/org/dspace/content/EntityType.java @@ -7,6 +7,7 @@ */ package org.dspace.content; +import java.util.Objects; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; @@ -15,6 +16,8 @@ import javax.persistence.Id; import javax.persistence.SequenceGenerator; import javax.persistence.Table; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.builder.HashCodeBuilder; import org.dspace.core.ReloadableEntity; /** @@ -45,7 +48,8 @@ public class EntityType implements ReloadableEntity { /** * The standard setter for the ID of this EntityType - * @param id The ID that this EntityType's ID will be set to + * + * @param id The ID that this EntityType's ID will be set to */ public void setId(Integer id) { this.id = id; @@ -53,7 +57,8 @@ public class EntityType implements ReloadableEntity { /** * The standard getter for the label of this EntityType - * @return The label for this EntityType + * + * @return The label for this EntityType */ public String getLabel() { return label; @@ -61,6 +66,7 @@ public class EntityType implements ReloadableEntity { /** * The standard setter for the label of this EntityType + * * @param label The label that this EntityType's label will be set to */ public void setLabel(String label) { @@ -69,9 +75,40 @@ public class EntityType implements ReloadableEntity { /** 
* The standard getter for the ID of this EntityType - * @return The ID for this EntityType + * + * @return The ID for this EntityType */ public Integer getID() { return id; } + + /** + * Determines whether two entity types are equal based on the id and the label + * @param obj object to be compared + * @return + */ + public boolean equals(Object obj) { + if (!(obj instanceof EntityType)) { + return false; + } + EntityType entityType = (EntityType) obj; + + if (!Objects.equals(this.getID(), entityType.getID())) { + return false; + } + + if (!StringUtils.equals(this.getLabel(), entityType.getLabel())) { + return false; + } + return true; + } + + /** + * Returns a hash code value for the object. + * @return hash code value + */ + @Override + public int hashCode() { + return new HashCodeBuilder().append(getID()).toHashCode(); + } } diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index 9502a2ca32..3b0253bf86 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -230,6 +230,12 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl implements It return itemDAO.findBySubmitter(context, eperson); } + @Override + public Iterator findBySubmitter(Context context, EPerson eperson, boolean retrieveAllItems) + throws SQLException { + return itemDAO.findBySubmitter(context, eperson, retrieveAllItems); + } + @Override public Iterator findBySubmitterDateSorted(Context context, EPerson eperson, Integer limit) throws SQLException { @@ -1100,19 +1106,7 @@ prevent the generation of resource policy entry values with null dspace_object a } break; case Constants.DELETE: - if (item.getOwningCollection() != null) { - if (AuthorizeConfiguration.canCollectionAdminPerformItemDeletion()) { - adminObject = collection; - } else if (AuthorizeConfiguration.canCommunityAdminPerformItemDeletion()) { - adminObject = community; - } - } else { - if (AuthorizeConfiguration.canCollectionAdminManageTemplateItem()) { - adminObject = collection; - } else if (AuthorizeConfiguration.canCommunityAdminManageCollectionTemplateItem()) { - adminObject = community; - } - } + adminObject = item; break; case Constants.WRITE: // if it is a template item we need to check the @@ -1372,6 +1366,32 @@ prevent the generation of resource policy entry values with null dspace_object a } + /** + * Supports moving metadata by adding the metadata value or updating the place of the relationship + */ + @Override + protected void moveSingleMetadataValue(Context context, Item dso, int place, MetadataValue rr) { + if (rr instanceof RelationshipMetadataValue) { + try { + //Retrieve the applicable relationship + Relationship rs = relationshipService.find(context, + ((RelationshipMetadataValue) rr).getRelationshipId()); + if (rs.getLeftItem() == dso) { + rs.setLeftPlace(place); + } else { + rs.setRightPlace(place); + } + relationshipService.update(context, rs); + } catch (Exception e) { + //should not occur, otherwise metadata can't be updated either + log.error("An error occurred while moving " + rr.getAuthority() + " for item " + dso.getID(), e); + } + } else { + //just move the metadata + rr.setPlace(place); + } + } + /** * This method will sort the List of MetadataValue objects based on the MetadataSchema, MetadataField Element, * MetadataField Qualifier and MetadataField Place in that order. 
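Because the DSpaceObjectService addMetadata variants reworked above now return the MetadataValue(s) they create, a caller can act on the newly created value directly instead of re-reading the metadata list. A hedged sketch of that usage (the wrapper class is illustrative; the service lookup follows the factory pattern used elsewhere in this patch):

```java
import java.sql.SQLException;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;

public class AddMetadataReturnValueSketch {
    private final ItemService itemService = ContentServiceFactory.getInstance().getItemService();

    /**
     * Add a title and immediately work with the value that was just created,
     * e.g. to log or reposition it, without querying the metadata list again.
     */
    public void addTitle(Context context, Item item, String title) throws SQLException {
        MetadataValue created = itemService.addMetadata(context, item, "dc", "title", null, null, title);
        // addMetadata has already placed the new value at the end of the dc.title list.
        System.out.println("Added title at place " + created.getPlace());
    }
}
```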
diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataDSpaceCsvExportServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/MetadataDSpaceCsvExportServiceImpl.java new file mode 100644 index 0000000000..1750938937 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/MetadataDSpaceCsvExportServiceImpl.java @@ -0,0 +1,130 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; + +import com.google.common.collect.Iterators; +import org.dspace.app.bulkedit.DSpaceCSV; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.MetadataDSpaceCsvExportService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.scripts.handler.DSpaceRunnableHandler; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link MetadataDSpaceCsvExportService} + */ +public class MetadataDSpaceCsvExportServiceImpl implements MetadataDSpaceCsvExportService { + + @Autowired + private ItemService itemService; + + @Override + public DSpaceCSV handleExport(Context context, boolean exportAllItems, boolean exportAllMetadata, String handle, + DSpaceRunnableHandler handler) throws Exception { + Iterator toExport = null; + + if (exportAllItems) { + handler.logInfo("Exporting whole repository WARNING: May take some time!"); + toExport = itemService.findAll(context); + } else { + DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, handle); + if (dso == null) { + throw new IllegalArgumentException( + "Item '" + handle + "' does not resolve to an item in your repository!"); + } + + if (dso.getType() == Constants.ITEM) { + handler.logInfo("Exporting item '" + dso.getName() + "' (" + handle + ")"); + List item = new ArrayList<>(); + item.add((Item) dso); + toExport = item.iterator(); + } else if (dso.getType() == Constants.COLLECTION) { + handler.logInfo("Exporting collection '" + dso.getName() + "' (" + handle + ")"); + Collection collection = (Collection) dso; + toExport = itemService.findByCollection(context, collection); + } else if (dso.getType() == Constants.COMMUNITY) { + handler.logInfo("Exporting community '" + dso.getName() + "' (" + handle + ")"); + toExport = buildFromCommunity(context, (Community) dso); + } else { + throw new IllegalArgumentException("Error identifying '" + handle + "'"); + } + } + + DSpaceCSV csv = this.export(context, toExport, exportAllMetadata); + return csv; + } + + @Override + public DSpaceCSV export(Context context, Iterator toExport, boolean exportAll) throws Exception { + Context.Mode originalMode = context.getCurrentMode(); + context.setMode(Context.Mode.READ_ONLY); + + // Process each item + DSpaceCSV csv = new DSpaceCSV(exportAll); + while (toExport.hasNext()) { + Item item = toExport.next(); + csv.addItem(item); + context.uncacheEntity(item); + } + + context.setMode(originalMode); + // Return the results + return csv; + } + + @Override + public DSpaceCSV export(Context context, Community community, boolean exportAll) throws Exception { + return export(context, buildFromCommunity(context, community), exportAll); + } + + /** + * 
Build an array list of item ids that are in a community (include sub-communities and collections) + * + * @param context DSpace context + * @param community The community to build from + * @return The list of item ids + * @throws SQLException if database error + */ + private Iterator buildFromCommunity(Context context, Community community) + throws SQLException { + // Add all the collections + List collections = community.getCollections(); + Iterator result = Collections.emptyIterator(); + for (Collection collection : collections) { + Iterator items = itemService.findByCollection(context, collection); + result = addItemsToResult(result, items); + + } + // Add all the sub-communities + List communities = community.getSubcommunities(); + for (Community subCommunity : communities) { + Iterator items = buildFromCommunity(context, subCommunity); + result = addItemsToResult(result, items); + } + + return result; + } + + private Iterator addItemsToResult(Iterator result, Iterator items) { + if (result == null) { + result = items; + } else { + result = Iterators.concat(result, items); + } + + return result; + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataField.java b/dspace-api/src/main/java/org/dspace/content/MetadataField.java index 3f574dab0e..0ea176c751 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataField.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataField.java @@ -168,11 +168,11 @@ public class MetadataField implements ReloadableEntity { return false; } Class objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj); - if (getClass() != objClass) { + if (!getClass().equals(objClass)) { return false; } final MetadataField other = (MetadataField) obj; - if (this.getID() != other.getID()) { + if (!this.getID().equals(other.getID())) { return false; } if (!getMetadataSchema().equals(other.getMetadataSchema())) { diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataFieldServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/MetadataFieldServiceImpl.java index c71db2d131..569b5840c6 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataFieldServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataFieldServiceImpl.java @@ -9,6 +9,8 @@ package org.dspace.content; import java.io.IOException; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import org.apache.commons.collections4.CollectionUtils; @@ -20,8 +22,12 @@ import org.dspace.content.dao.MetadataFieldDAO; import org.dspace.content.service.MetadataFieldService; import org.dspace.content.service.MetadataSchemaService; import org.dspace.content.service.MetadataValueService; +import org.dspace.content.service.SiteService; +import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.LogManager; +import org.dspace.discovery.indexobject.IndexableMetadataField; +import org.dspace.event.Event; import org.springframework.beans.factory.annotation.Autowired; /** @@ -46,6 +52,8 @@ public class MetadataFieldServiceImpl implements MetadataFieldService { protected MetadataValueService metadataValueService; @Autowired(required = true) protected MetadataSchemaService metadataSchemaService; + @Autowired + protected SiteService siteService; protected MetadataFieldServiceImpl() { @@ -77,6 +85,8 @@ public class MetadataFieldServiceImpl implements MetadataFieldService { log.info(LogManager.getHeader(context, "create_metadata_field", "metadata_field_id=" + 
metadataField.getID())); + // Update the index of type metadatafield + this.triggerEventToUpdateIndex(context, metadataField.getID()); return metadataField; } @@ -149,6 +159,8 @@ public class MetadataFieldServiceImpl implements MetadataFieldService { "metadata_field_id=" + metadataField.getID() + "element=" + metadataField .getElement() + "qualifier=" + metadataField.getQualifier())); + // Update the index of type metadatafield + this.triggerEventToUpdateIndex(context, metadataField.getID()); } @Override @@ -177,6 +189,21 @@ public class MetadataFieldServiceImpl implements MetadataFieldService { log.info(LogManager.getHeader(context, "delete_metadata_field", "metadata_field_id=" + metadataField.getID())); + // Update the index of type metadatafield + this.triggerEventToUpdateIndex(context, metadataField.getID()); + } + + /** + * Calls a MODIFY SITE event with the identifier of the changed mdField, so it can be indexed in + * {@link org.dspace.discovery.IndexEventConsumer}, with type of {@link org.dspace.discovery.IndexableObject} in + * {@link Event}.detail and the identifiers of the changed mdFields in {@link Event}.identifiers + * + * @param context DSpace context + * @param mdFieldId ID of the metadata field that needs to be (re)indexed + */ + private void triggerEventToUpdateIndex(Context context, int mdFieldId) { + context.addEvent(new Event(Event.MODIFY, Constants.SITE, null, IndexableMetadataField.TYPE, new ArrayList<>( + Arrays.asList(Integer.toString(mdFieldId))))); } /** diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataSchema.java b/dspace-api/src/main/java/org/dspace/content/MetadataSchema.java index 96bef0fa2c..727181ee9d 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataSchema.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataSchema.java @@ -67,11 +67,11 @@ public class MetadataSchema implements ReloadableEntity { return false; } Class objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj); - if (getClass() != objClass) { + if (!getClass().equals(objClass)) { return false; } final MetadataSchema other = (MetadataSchema) obj; - if (this.id != other.id) { + if (!this.id.equals(other.id)) { return false; } if ((this.namespace == null) ? 
(other.namespace != null) : !this.namespace.equals(other.namespace)) { diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataValue.java b/dspace-api/src/main/java/org/dspace/content/MetadataValue.java index 4ce0c291f7..2d9808ae45 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataValue.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataValue.java @@ -239,17 +239,17 @@ public class MetadataValue implements ReloadableEntity { return false; } Class objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj); - if (getClass() != objClass) { + if (!getClass().equals(objClass)) { return false; } final MetadataValue other = (MetadataValue) obj; - if (this.id != other.id) { + if (!this.id.equals(other.id)) { return false; } - if (this.getID() != other.getID()) { + if (!this.getID().equals(other.getID())) { return false; } - if (this.getDSpaceObject().getID() != other.getDSpaceObject().getID()) { + if (!this.getDSpaceObject().getID().equals(other.getDSpaceObject().getID())) { return false; } return true; diff --git a/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataValue.java b/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataValue.java index 88d2e38beb..637d1c094b 100644 --- a/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataValue.java +++ b/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataValue.java @@ -7,6 +7,8 @@ */ package org.dspace.content; +import org.dspace.core.Constants; + /** * This class is used as a representation of MetadataValues for the MetadataValues that are derived from the * Relationships that the item has. This includes the useForPlace property which we'll have to use to determine @@ -57,4 +59,13 @@ public class RelationshipMetadataValue extends MetadataValue { } return super.equals(obj); } + + /** + * Retrieves the Relationship ID from which the current RelationshipMetadataValue is derived + * + * @return the relationship ID + */ + public int getRelationshipId() { + return Integer.parseInt(getAuthority().substring(Constants.VIRTUAL_AUTHORITY_PREFIX.length())); + } } diff --git a/dspace-api/src/main/java/org/dspace/content/WorkspaceItem.java b/dspace-api/src/main/java/org/dspace/content/WorkspaceItem.java index f55dfaf2da..8049aa976c 100644 --- a/dspace-api/src/main/java/org/dspace/content/WorkspaceItem.java +++ b/dspace-api/src/main/java/org/dspace/content/WorkspaceItem.java @@ -156,11 +156,11 @@ public class WorkspaceItem return true; } Class objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(o); - if (getClass() != objClass) { + if (!getClass().equals(objClass)) { return false; } final WorkspaceItem that = (WorkspaceItem) o; - if (this.getID() != that.getID()) { + if (!this.getID().equals(that.getID())) { return false; } diff --git a/dspace-api/src/main/java/org/dspace/content/WorkspaceItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/WorkspaceItemServiceImpl.java index c45f6c737c..8fc302f8bf 100644 --- a/dspace-api/src/main/java/org/dspace/content/WorkspaceItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/WorkspaceItemServiceImpl.java @@ -212,9 +212,8 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService { */ Item item = workspaceItem.getItem(); if (!authorizeService.isAdmin(context) - && ((context.getCurrentUser() == null) || (context - .getCurrentUser().getID() != item.getSubmitter() - .getID()))) { + && (item.getSubmitter() == null || (context.getCurrentUser() == null) + || 
(context.getCurrentUser().getID() != item.getSubmitter().getID()))) { // Not an admin, not the submitter throw new AuthorizeException("Must be an administrator or the " + "original submitter to delete a workspace item"); @@ -265,7 +264,12 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService { // Need to delete the workspaceitem row first since it refers // to item ID - workspaceItem.getSupervisorGroups().clear(); + try { + workspaceItem.getSupervisorGroups().clear(); + } catch (Exception e) { + log.error("failed to clear supervisor group", e); + } + workspaceItemDAO.delete(context, workspaceItem); } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/Choice.java b/dspace-api/src/main/java/org/dspace/content/authority/Choice.java index 9b68c75d28..6d73bdb5ea 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/Choice.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/Choice.java @@ -33,21 +33,62 @@ public class Choice { */ public String value = null; + /** + * A boolean representing whether the choice entry value can be selected (usually true). + * A hierarchical authority can flag some choices as not selectable to force the + * user to choose a more detailed term in the tree, such as a leaf or a deeper + * branch + */ + public boolean selectable = true; + public Map extras = new HashMap(); public Choice() { } + /** + * Minimal constructor for this data object. It assumes an empty map of extra + * information and a selectable choice + * + * @param authority the authority key + * @param value the text value to store in the metadata + * @param label the value to display to the user + */ public Choice(String authority, String value, String label) { this.authority = authority; this.value = value; this.label = label; } + /** + * Constructor to quickly set up the data object for basic authorities. The choice is assumed to be selectable.
+ * + * @param authority the authority key + * @param value the text value to store in the metadata + * @param label the value to display to the user + * @param extras a key value map of extra information related to this choice + */ public Choice(String authority, String label, String value, Map extras) { this.authority = authority; this.label = label; this.value = value; this.extras = extras; } + + /** + * Constructor for common need of Hierarchical authorities that want to + * explicitely set the selectable flag + * + * @param authority the authority key + * @param value the text value to store in the metadata + * @param label the value to display to the user + * @param selectable true if the choice can be selected, false if the a more + * accurate choice should be preferred + */ + public Choice(String authority, String label, String value, boolean selectable) { + this.authority = authority; + this.label = label; + this.value = value; + this.selectable = selectable; + } } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthority.java index d2d06fe983..750e761f3d 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthority.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthority.java @@ -7,7 +7,10 @@ */ package org.dspace.content.authority; -import org.dspace.content.Collection; +import java.util.HashMap; +import java.util.Map; + +import org.dspace.core.NameAwarePlugin; /** * Plugin interface that supplies an authority control mechanism for @@ -17,7 +20,7 @@ import org.dspace.content.Collection; * @see ChoiceAuthorityServiceImpl * @see MetadataAuthorityServiceImpl */ -public interface ChoiceAuthority { +public interface ChoiceAuthority extends NameAwarePlugin { /** * Get all values from the authority that match the preferred value. * Note that the offering was entered by the user and may contain @@ -32,15 +35,13 @@ public interface ChoiceAuthority { * defaultSelected index in the Choices instance to the choice, if any, * that matches the value. * - * @param field being matched for * @param text user's value to match - * @param collection database ID of Collection for context (owner of Item) * @param start choice at which to start, 0 is first. * @param limit maximum number of choices to return, 0 for no limit. * @param locale explicit localization key if available, or null * @return a Choices object (never null). */ - public Choices getMatches(String field, String text, Collection collection, int start, int limit, String locale); + public Choices getMatches(String text, int start, int limit, String locale); /** * Get the single "best" match (if any) of a value in the authority @@ -51,13 +52,11 @@ public interface ChoiceAuthority { * This call is typically used in non-interactive metadata ingest * where there is no interactive agent to choose from among options. * - * @param field being matched for * @param text user's value to match - * @param collection database ID of Collection for context (owner of Item) * @param locale explicit localization key if available, or null * @return a Choices object (never null) with 1 or 0 values. */ - public Choices getBestMatch(String field, String text, Collection collection, String locale); + public Choices getBestMatch(String text, String locale); /** * Get the canonical user-visible "label" (i.e. 
short descriptive text) @@ -67,31 +66,97 @@ * This may get called many times while populating a Web page so it should * be implemented as efficiently as possible. * - * @param field being matched for * @param key authority key known to this authority. * @param locale explicit localization key if available, or null * @return descriptive label - should always return something, never null. */ - public String getLabel(String field, String key, String locale); + public String getLabel(String key, String locale); + /** + * Get the canonical value to store for a key in the authority. Can be localized + * given the implicit or explicit locale specification. + * + * @param key authority key known to this authority. + * @param locale explicit localization key if available, or null + * @return value to store - should always return something, never null. + */ + default String getValue(String key, String locale) { + return getLabel(key, locale); + } + + /** + * Get a map of additional information related to the specified key in the + * authority. + * + * @param key the key of the entry + * @param locale explicit localization key if available, or null + * @return a map of additional information related to the key + */ + default Map getExtra(String key, String locale) { + return new HashMap(); + } + + /** + * Return true for hierarchical authorities + * + * @return true if hierarchical, default false + */ default boolean isHierarchical() { return false; } + /** + * Scrollable authorities allow scrolling through the entries without applying a + * filter/query to the + * {@link #getMatches(String, String, Collection, int, int, String)} + * + * @return true if scrollable, default false + */ default boolean isScrollable() { return false; } - default boolean hasIdentifier() { - return true; + /** + * A hierarchical authority can provide a hint for the UI about how many levels + * to preload to improve the UX. It provides a valid default for hierarchical + * authorities + * + * @return 0 if hierarchical, null otherwise + */ + default Integer getPreloadLevel() { + return isHierarchical() ? 0 : null; } - default public Choice getChoice(String fieldKey, String authKey, String locale) { + /** + * Build the preferred choice associated with the authKey. The default + * implementation delegates the creation to the {@link #getLabel(String, String)}, + * {@link #getValue(String, String)} and {@link #getExtra(String, String)} + * methods but can be directly overridden for better efficiency or special + * scenarios + * + * @param authKey authority key known to this authority. + * @param locale explicit localization key if available, or null + * @return the preferred choice for this authKey and locale + */ + default public Choice getChoice(String authKey, String locale) { Choice result = new Choice(); result.authority = authKey; - result.label = getLabel(fieldKey, authKey, locale); - result.value = getLabel(fieldKey, authKey, locale); + result.label = getLabel(authKey, locale); + result.value = getValue(authKey, locale); + result.extras.putAll(getExtra(authKey, locale)); return result; } + /** + * Provide a recommendation to store the authority in the metadata value if + * available in the provided choice(s).
Usually ChoiceAuthority should + * recommend that so the default is true and it only need to be implemented in + * the unusual scenario + * + * @return true if the authority provided in any choice of this + * authority should be stored in the metadata value + */ + default public boolean storeAuthorityInMetadata() { + return true; + } } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java index 4cc3f9d6db..0e05852af0 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java @@ -7,10 +7,13 @@ */ package org.dspace.content.authority; +import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Set; import org.apache.commons.lang3.StringUtils; @@ -19,6 +22,9 @@ import org.dspace.app.util.DCInput; import org.dspace.app.util.DCInputSet; import org.dspace.app.util.DCInputsReader; import org.dspace.app.util.DCInputsReaderException; +import org.dspace.app.util.SubmissionConfig; +import org.dspace.app.util.SubmissionConfigReader; +import org.dspace.app.util.SubmissionConfigReaderException; import org.dspace.content.Collection; import org.dspace.content.MetadataValue; import org.dspace.content.authority.service.ChoiceAuthorityService; @@ -54,23 +60,37 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService // map of field key to authority plugin protected Map controller = new HashMap(); + // map of field key, form definition to authority plugin + protected Map> controllerFormDefinitions = + new HashMap>(); + // map of field key to presentation type protected Map presentation = new HashMap(); // map of field key to closed value protected Map closed = new HashMap(); - // map of authority name to field key - protected Map authorities = new HashMap(); + // flag to track the initialization status of the service + private boolean initialized = false; + + // map of authority name to field keys (the same authority can be configured over multiple metadata) + protected Map> authorities = new HashMap>(); + + // map of authority name to form definition and field keys + protected Map>> authoritiesFormDefinitions = + new HashMap>>(); + + // the item submission reader + private SubmissionConfigReader itemSubmissionConfigReader; @Autowired(required = true) protected ConfigurationService configurationService; @Autowired(required = true) protected PluginService pluginService; - private final String CHOICES_PLUGIN_PREFIX = "choices.plugin."; - private final String CHOICES_PRESENTATION_PREFIX = "choices.presentation."; - private final String CHOICES_CLOSED_PREFIX = "choices.closed."; + final static String CHOICES_PLUGIN_PREFIX = "choices.plugin."; + final static String CHOICES_PRESENTATION_PREFIX = "choices.presentation."; + final static String CHOICES_CLOSED_PREFIX = "choices.closed."; protected ChoiceAuthorityServiceImpl() { } @@ -96,10 +116,25 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService @Override public Set getChoiceAuthoritiesNames() { - if (authorities.keySet().isEmpty()) { + init(); + Set authoritiesNames = new HashSet(); + authoritiesNames.addAll(authorities.keySet()); + authoritiesNames.addAll(authoritiesFormDefinitions.keySet()); + return 
authoritiesNames; + } + + private synchronized void init() { + if (!initialized) { + try { + itemSubmissionConfigReader = new SubmissionConfigReader(); + } catch (SubmissionConfigReaderException e) { + // the system is in an illegal state as the submission definition is not valid + throw new IllegalStateException("Error reading the item submission configuration: " + e.getMessage(), + e); + } loadChoiceAuthorityConfigurations(); + initialized = true; } - return authorities.keySet(); } @Override @@ -112,59 +147,62 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService @Override public Choices getMatches(String fieldKey, String query, Collection collection, int start, int limit, String locale) { - ChoiceAuthority ma = getChoiceAuthorityMap().get(fieldKey); + ChoiceAuthority ma = getAuthorityByFieldKeyCollection(fieldKey, collection); if (ma == null) { throw new IllegalArgumentException( "No choices plugin was configured for field \"" + fieldKey - + "\"."); + + "\", collection=" + collection.getID().toString() + "."); } - return ma.getMatches(fieldKey, query, collection, start, limit, locale); + return ma.getMatches(query, start, limit, locale); } + @Override public Choices getMatches(String fieldKey, String query, Collection collection, int start, int limit, String locale, boolean externalInput) { - ChoiceAuthority ma = getChoiceAuthorityMap().get(fieldKey); + ChoiceAuthority ma = getAuthorityByFieldKeyCollection(fieldKey, collection); if (ma == null) { throw new IllegalArgumentException( "No choices plugin was configured for field \"" + fieldKey - + "\"."); + + "\", collection=" + collection.getID().toString() + "."); } if (externalInput && ma instanceof SolrAuthority) { ((SolrAuthority) ma).addExternalResultsInNextMatches(); } - return ma.getMatches(fieldKey, query, collection, start, limit, locale); + return ma.getMatches(query, start, limit, locale); } @Override public Choices getBestMatch(String fieldKey, String query, Collection collection, String locale) { - ChoiceAuthority ma = getChoiceAuthorityMap().get(fieldKey); + ChoiceAuthority ma = getAuthorityByFieldKeyCollection(fieldKey, collection); if (ma == null) { throw new IllegalArgumentException( "No choices plugin was configured for field \"" + fieldKey - + "\"."); + + "\", collection=" + collection.getID().toString() + "."); } - return ma.getBestMatch(fieldKey, query, collection, locale); + return ma.getBestMatch(query, locale); } @Override - public String getLabel(MetadataValue metadataValue, String locale) { - return getLabel(metadataValue.getMetadataField().toString(), metadataValue.getAuthority(), locale); + public String getLabel(MetadataValue metadataValue, Collection collection, String locale) { + return getLabel(metadataValue.getMetadataField().toString(), collection, metadataValue.getAuthority(), locale); } @Override - public String getLabel(String fieldKey, String authKey, String locale) { - ChoiceAuthority ma = getChoiceAuthorityMap().get(fieldKey); + public String getLabel(String fieldKey, Collection collection, String authKey, String locale) { + ChoiceAuthority ma = getAuthorityByFieldKeyCollection(fieldKey, collection); if (ma == null) { - throw new IllegalArgumentException("No choices plugin was configured for field \"" + fieldKey + "\"."); + throw new IllegalArgumentException( + "No choices plugin was configured for field \"" + fieldKey + + "\", collection=" + collection.getID().toString() + "."); } - return ma.getLabel(fieldKey, authKey, locale); + return ma.getLabel(authKey, locale); } 
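The collection-aware lookups above resolve the authority per metadata field and, when necessary, per submission form bound to the owning collection. A short usage sketch, assuming an owning Collection and a field key such as dc_type that has a choices plugin configured; the factory and service names follow existing DSpace APIs, and the snippet is illustrative only.

import org.dspace.content.Collection;
import org.dspace.content.authority.Choices;
import org.dspace.content.authority.factory.ContentAuthorityServiceFactory;
import org.dspace.content.authority.service.ChoiceAuthorityService;

public class ChoiceLookupSketch {
    // Ask the service for the single best match of a user-entered value,
    // letting it pick the authority configured for this field and collection.
    public static String bestMatchAuthority(Collection collection, String query) {
        ChoiceAuthorityService choiceAuthorityService =
            ContentAuthorityServiceFactory.getInstance().getChoiceAuthorityService();
        Choices best = choiceAuthorityService.getBestMatch("dc_type", query, collection, null);
        return best.values.length > 0 ? best.values[0].authority : null;
    }
}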
@Override - public boolean isChoicesConfigured(String fieldKey) { - return getChoiceAuthorityMap().containsKey(fieldKey); + public boolean isChoicesConfigured(String fieldKey, Collection collection) { + return getAuthorityByFieldKeyCollection(fieldKey, collection) != null; } @Override @@ -178,8 +216,14 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService } @Override - public List getVariants(MetadataValue metadataValue) { - ChoiceAuthority ma = getChoiceAuthorityMap().get(metadataValue.getMetadataField().toString()); + public List getVariants(MetadataValue metadataValue, Collection collection) { + String fieldKey = metadataValue.getMetadataField().toString(); + ChoiceAuthority ma = getAuthorityByFieldKeyCollection(fieldKey, collection); + if (ma == null) { + throw new IllegalArgumentException( + "No choices plugin was configured for field \"" + fieldKey + + "\", collection=" + collection.getID().toString() + "."); + } if (ma instanceof AuthorityVariantsSupport) { AuthorityVariantsSupport avs = (AuthorityVariantsSupport) ma; return avs.getVariants(metadataValue.getAuthority(), metadataValue.getLanguage()); @@ -189,42 +233,53 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService @Override - public String getChoiceAuthorityName(String schema, String element, String qualifier) { - String makeFieldKey = makeFieldKey(schema, element, qualifier); - if (getChoiceAuthorityMap().containsKey(makeFieldKey)) { - for (String key : this.authorities.keySet()) { - if (this.authorities.get(key).equals(makeFieldKey)) { - return key; + public String getChoiceAuthorityName(String schema, String element, String qualifier, Collection collection) { + init(); + String fieldKey = makeFieldKey(schema, element, qualifier); + // check if there is an authority configured for the metadata valid for all the collections + if (controller.containsKey(fieldKey)) { + for (Entry> authority2md : authorities.entrySet()) { + if (authority2md.getValue().contains(fieldKey)) { + return authority2md.getKey(); + } + } + } else if (collection != null && controllerFormDefinitions.containsKey(fieldKey)) { + // there is an authority configured for the metadata valid for some collections, + // check if it is the requested collection + Map controllerFormDef = controllerFormDefinitions.get(fieldKey); + SubmissionConfig submissionConfig = itemSubmissionConfigReader + .getSubmissionConfigByCollection(collection.getHandle()); + String submissionName = submissionConfig.getSubmissionName(); + // check if the requested collection has a submission definition that use an authority for the metadata + if (controllerFormDef.containsKey(submissionName)) { + for (Entry>> authority2defs2md : + authoritiesFormDefinitions.entrySet()) { + List mdByDefinition = authority2defs2md.getValue().get(submissionName); + if (mdByDefinition != null && mdByDefinition.contains(fieldKey)) { + return authority2defs2md.getKey(); + } } } } - return configurationService.getProperty( - CHOICES_PLUGIN_PREFIX + schema + "." + element + (qualifier != null ? "." 
+ qualifier : "")); + return null; } protected String makeFieldKey(String schema, String element, String qualifier) { return Utils.standardize(schema, element, qualifier, "_"); } - /** - * Return map of key to ChoiceAuthority plugin - * - * @return - */ - private Map getChoiceAuthorityMap() { - // If empty, load from configuration - if (controller.isEmpty()) { - loadChoiceAuthorityConfigurations(); - } - - return controller; - } - @Override public void clearCache() { controller.clear(); authorities.clear(); + presentation.clear(); + closed.clear(); + controllerFormDefinitions.clear(); + authoritiesFormDefinitions.clear(); + itemSubmissionConfigReader = null; + initialized = false; } + private void loadChoiceAuthorityConfigurations() { // Get all configuration keys starting with a given prefix List propKeys = configurationService.getPropertyKeys(CHOICES_PLUGIN_PREFIX); @@ -249,71 +304,127 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService "Skipping invalid configuration for " + key + " because named plugin not found: " + authorityName); continue; } - if (!authorities.containsKey(authorityName)) { - controller.put(fkey, ma); - authorities.put(authorityName, fkey); - } else { - log.warn( - "Skipping invalid configuration for " + key + " because plugin is alredy in use: " + - authorityName + " used by " + authorities - .get(authorityName)); - continue; - } + controller.put(fkey, ma); + List fkeys; + if (authorities.containsKey(authorityName)) { + fkeys = authorities.get(authorityName); + } else { + fkeys = new ArrayList(); + } + fkeys.add(fkey); + authorities.put(authorityName, fkeys); log.debug("Choice Control: For field=" + fkey + ", Plugin=" + ma); } autoRegisterChoiceAuthorityFromInputReader(); } + /** + * This method will register all the authorities that are required due to the + * submission forms configuration. 
This includes authorities for value pairs and + * xml vocabularies + */ private void autoRegisterChoiceAuthorityFromInputReader() { try { + List submissionConfigs = itemSubmissionConfigReader + .getAllSubmissionConfigs(Integer.MAX_VALUE, 0); DCInputsReader dcInputsReader = new DCInputsReader(); - for (DCInputSet dcinputSet : dcInputsReader.getAllInputs(Integer.MAX_VALUE, 0)) { - DCInput[][] dcinputs = dcinputSet.getFields(); - for (DCInput[] dcrows : dcinputs) { - for (DCInput dcinput : dcrows) { - if (StringUtils.isNotBlank(dcinput.getPairsType()) - || StringUtils.isNotBlank(dcinput.getVocabulary())) { - String authorityName = dcinput.getPairsType(); - if (StringUtils.isBlank(authorityName)) { + + // loop over all the defined item submission configuration + for (SubmissionConfig subCfg : submissionConfigs) { + String submissionName = subCfg.getSubmissionName(); + List inputsBySubmissionName = dcInputsReader.getInputsBySubmissionName(submissionName); + // loop over the submission forms configuration eventually associated with the submission panel + for (DCInputSet dcinputSet : inputsBySubmissionName) { + DCInput[][] dcinputs = dcinputSet.getFields(); + for (DCInput[] dcrows : dcinputs) { + for (DCInput dcinput : dcrows) { + // for each input in the form check if it is associated with a real value pairs + // or an xml vocabulary + String authorityName = null; + if (StringUtils.isNotBlank(dcinput.getPairsType()) + && !StringUtils.equals(dcinput.getInputType(), "qualdrop_value")) { + authorityName = dcinput.getPairsType(); + } else if (StringUtils.isNotBlank(dcinput.getVocabulary())) { authorityName = dcinput.getVocabulary(); } - if (!StringUtils.equals(dcinput.getInputType(), "qualdrop_value")) { + + // do we have an authority? + if (StringUtils.isNotBlank(authorityName)) { String fieldKey = makeFieldKey(dcinput.getSchema(), dcinput.getElement(), dcinput.getQualifier()); ChoiceAuthority ca = controller.get(authorityName); if (ca == null) { - InputFormSelfRegisterWrapperAuthority ifa = new - InputFormSelfRegisterWrapperAuthority(); - if (controller.containsKey(fieldKey)) { - ifa = (InputFormSelfRegisterWrapperAuthority) controller.get(fieldKey); - } - - ChoiceAuthority ma = (ChoiceAuthority) pluginService + ca = (ChoiceAuthority) pluginService .getNamedPlugin(ChoiceAuthority.class, authorityName); - if (ma == null) { - log.warn("Skipping invalid configuration for " + fieldKey - + " because named plugin not found: " + authorityName); - continue; + if (ca == null) { + throw new IllegalStateException("Invalid configuration for " + fieldKey + + " in submission definition " + submissionName + + ", form definition " + dcinputSet.getFormName() + + " no named plugin found: " + authorityName); } - ifa.getDelegates().put(dcinputSet.getFormName(), ma); - controller.put(fieldKey, ifa); - } - - if (!authorities.containsKey(authorityName)) { - authorities.put(authorityName, fieldKey); } + addAuthorityToFormCacheMap(submissionName, fieldKey, ca); + addFormDetailsToAuthorityCacheMap(submissionName, authorityName, fieldKey); } } } } } } catch (DCInputsReaderException e) { - throw new IllegalStateException(e.getMessage(), e); + // the system is in an illegal state as the submission definition is not valid + throw new IllegalStateException("Error reading the item submission configuration: " + e.getMessage(), + e); } } + /** + * Add the form/field to the cache map keeping track of which form/field are + * associated with the specific authority name + * + * @param submissionName the form definition name + * 
@param authorityName the name of the authority plugin + * @param fieldKey the field key that use the authority + */ + private void addFormDetailsToAuthorityCacheMap(String submissionName, String authorityName, String fieldKey) { + Map> submissionDefinitionNames2fieldKeys; + if (authoritiesFormDefinitions.containsKey(authorityName)) { + submissionDefinitionNames2fieldKeys = authoritiesFormDefinitions.get(authorityName); + } else { + submissionDefinitionNames2fieldKeys = new HashMap>(); + } + + List fields; + if (submissionDefinitionNames2fieldKeys.containsKey(submissionName)) { + fields = submissionDefinitionNames2fieldKeys.get(submissionName); + } else { + fields = new ArrayList(); + } + fields.add(fieldKey); + submissionDefinitionNames2fieldKeys.put(submissionName, fields); + authoritiesFormDefinitions.put(authorityName, submissionDefinitionNames2fieldKeys); + } + + /** + * Add the authority plugin to the cache map keeping track of which authority is + * used by a specific form/field + * + * @param submissionName the submission definition name + * @param fieldKey the field key that require the authority + * @param ca the authority plugin + */ + private void addAuthorityToFormCacheMap(String submissionName, String fieldKey, ChoiceAuthority ca) { + Map definition2authority; + if (controllerFormDefinitions.containsKey(fieldKey)) { + definition2authority = controllerFormDefinitions.get(fieldKey); + } else { + definition2authority = new HashMap(); + } + definition2authority.put(submissionName, ca); + controllerFormDefinitions.put(fieldKey, definition2authority); + } + /** * Return map of key to presentation * @@ -370,26 +481,6 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService return closed; } - @Override - public String getChoiceMetadatabyAuthorityName(String name) { - if (authorities.isEmpty()) { - loadChoiceAuthorityConfigurations(); - } - if (authorities.containsKey(name)) { - return authorities.get(name); - } - return null; - } - - @Override - public Choice getChoice(String fieldKey, String authKey, String locale) { - ChoiceAuthority ma = getChoiceAuthorityMap().get(fieldKey); - if (ma == null) { - throw new IllegalArgumentException("No choices plugin was configured for field \"" + fieldKey + "\"."); - } - return ma.getChoice(fieldKey, authKey, locale); - } - @Override public ChoiceAuthority getChoiceAuthorityByAuthorityName(String authorityName) { ChoiceAuthority ma = (ChoiceAuthority) @@ -401,4 +492,68 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService } return ma; } + + private ChoiceAuthority getAuthorityByFieldKeyCollection(String fieldKey, Collection collection) { + init(); + ChoiceAuthority ma = controller.get(fieldKey); + if (ma == null && collection != null) { + SubmissionConfigReader configReader; + try { + configReader = new SubmissionConfigReader(); + SubmissionConfig submissionName = configReader.getSubmissionConfigByCollection(collection.getHandle()); + ma = controllerFormDefinitions.get(fieldKey).get(submissionName.getSubmissionName()); + } catch (SubmissionConfigReaderException e) { + // the system is in an illegal state as the submission definition is not valid + throw new IllegalStateException("Error reading the item submission configuration: " + e.getMessage(), + e); + } + } + return ma; + } + + @Override + public boolean storeAuthority(String fieldKey, Collection collection) { + // currently only named authority can eventually provide real authority + return controller.containsKey(fieldKey); + } + + /** + 
* Wrapper that calls getChoicesByParent method of the plugin. + * + * @param authorityName authority name + * @param parentId parent Id + * @param start choice at which to start, 0 is first. + * @param limit maximum number of choices to return, 0 for no limit. + * @param locale explicit localization key if available, or null + * @return a Choices object (never null). + * @see org.dspace.content.authority.ChoiceAuthority#getChoicesByParent(java.lang.String, java.lang.String, + * int, int, java.lang.String) + */ + @Override + public Choices getChoicesByParent(String authorityName, String parentId, int start, int limit, String locale) { + HierarchicalAuthority ma = (HierarchicalAuthority) getChoiceAuthorityByAuthorityName(authorityName); + return ma.getChoicesByParent(authorityName, parentId, start, limit, locale); + } + + /** + * Wrapper that calls getTopChoices method of the plugin. + * + * @param authorityName authority name + * @param start choice at which to start, 0 is first. + * @param limit maximum number of choices to return, 0 for no limit. + * @param locale explicit localization key if available, or null + * @return a Choices object (never null). + * @see org.dspace.content.authority.ChoiceAuthority#getTopChoices(java.lang.String, int, int, java.lang.String) + */ + @Override + public Choices getTopChoices(String authorityName, int start, int limit, String locale) { + HierarchicalAuthority ma = (HierarchicalAuthority) getChoiceAuthorityByAuthorityName(authorityName); + return ma.getTopChoices(authorityName, start, limit, locale); + } + + @Override + public Choice getParentChoice(String authorityName, String vocabularyId, String locale) { + HierarchicalAuthority ma = (HierarchicalAuthority) getChoiceAuthorityByAuthorityName(authorityName); + return ma.getParentChoice(authorityName, vocabularyId, locale); + } } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/DCInputAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/DCInputAuthority.java index a64ebdd971..b1d8cf36a5 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/DCInputAuthority.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/DCInputAuthority.java @@ -9,14 +9,20 @@ package org.dspace.content.authority; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; import java.util.Iterator; import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; import org.apache.commons.lang3.ArrayUtils; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.app.util.DCInputsReader; import org.dspace.app.util.DCInputsReaderException; -import org.dspace.content.Collection; +import org.dspace.core.I18nUtil; import org.dspace.core.SelfNamedPlugin; /** @@ -44,16 +50,38 @@ import org.dspace.core.SelfNamedPlugin; public class DCInputAuthority extends SelfNamedPlugin implements ChoiceAuthority { private static Logger log = org.apache.logging.log4j.LogManager.getLogger(DCInputAuthority.class); - private String values[] = null; - private String labels[] = null; + /** + * The map of the values available for a specific language. Examples of keys are + * "en", "it", "uk" + */ + private Map values = null; - private static DCInputsReader dci = null; + /** + * The map of the labels available for a specific language. 
Examples of keys are + * "en", "it", "uk" + */ + private Map labels = null; + + /** + * The map of the input form reader associated to use for a specific java locale + */ + private static Map dcis = null; private static String pluginNames[] = null; public DCInputAuthority() { super(); } + @Override + public boolean storeAuthorityInMetadata() { + // For backward compatibility value pairs don't store authority in + // the metadatavalue + return false; + } + public static void reset() { + pluginNames = null; + } + public static String[] getPluginNames() { if (pluginNames == null) { initPluginNames(); @@ -63,20 +91,28 @@ public class DCInputAuthority extends SelfNamedPlugin implements ChoiceAuthority } private static synchronized void initPluginNames() { + Locale[] locales = I18nUtil.getSupportedLocales(); + Set names = new HashSet(); if (pluginNames == null) { try { - if (dci == null) { - dci = new DCInputsReader(); + dcis = new HashMap(); + for (Locale locale : locales) { + dcis.put(locale, new DCInputsReader(I18nUtil.getInputFormsFileName(locale))); + } + for (Locale l : locales) { + Iterator pi = dcis.get(l).getPairsNameIterator(); + while (pi.hasNext()) { + names.add((String) pi.next()); + } + } + DCInputsReader dcirDefault = new DCInputsReader(); + Iterator pi = dcirDefault.getPairsNameIterator(); + while (pi.hasNext()) { + names.add((String) pi.next()); } } catch (DCInputsReaderException e) { log.error("Failed reading DCInputs initialization: ", e); } - List names = new ArrayList(); - Iterator pi = dci.getPairsNameIterator(); - while (pi.hasNext()) { - names.add((String) pi.next()); - } - pluginNames = names.toArray(new String[names.size()]); log.debug("Got plugin names = " + Arrays.deepToString(pluginNames)); } @@ -85,45 +121,65 @@ public class DCInputAuthority extends SelfNamedPlugin implements ChoiceAuthority // once-only load of values and labels private void init() { if (values == null) { + values = new HashMap(); + labels = new HashMap(); String pname = this.getPluginInstanceName(); - List pairs = dci.getPairs(pname); - if (pairs != null) { - values = new String[pairs.size() / 2]; - labels = new String[pairs.size() / 2]; - for (int i = 0; i < pairs.size(); i += 2) { - labels[i / 2] = pairs.get(i); - values[i / 2] = pairs.get(i + 1); + for (Locale l : dcis.keySet()) { + DCInputsReader dci = dcis.get(l); + List pairs = dci.getPairs(pname); + if (pairs != null) { + String[] valuesLocale = new String[pairs.size() / 2]; + String[]labelsLocale = new String[pairs.size() / 2]; + for (int i = 0; i < pairs.size(); i += 2) { + labelsLocale[i / 2] = pairs.get(i); + valuesLocale[i / 2] = pairs.get(i + 1); + } + values.put(l.getLanguage(), valuesLocale); + labels.put(l.getLanguage(), labelsLocale); + log.debug("Found pairs for name=" + pname + ",locale=" + l); + } else { + log.error("Failed to find any pairs for name=" + pname, new IllegalStateException()); } - log.debug("Found pairs for name=" + pname); - } else { - log.error("Failed to find any pairs for name=" + pname, new IllegalStateException()); } + } } @Override - public Choices getMatches(String field, String query, Collection collection, int start, int limit, String locale) { + public Choices getMatches(String query, int start, int limit, String locale) { init(); - + Locale currentLocale = I18nUtil.getSupportedLocale(locale); + String[] valuesLocale = values.get(currentLocale.getLanguage()); + String[] labelsLocale = labels.get(currentLocale.getLanguage()); int dflt = -1; - Choice v[] = new Choice[values.length]; - for (int i = 0; i < 
values.length; ++i) { - v[i] = new Choice(values[i], values[i], labels[i]); - if (values[i].equalsIgnoreCase(query)) { - dflt = i; + int found = 0; + List v = new ArrayList(); + for (int i = 0; i < valuesLocale.length; ++i) { + if (query == null || StringUtils.containsIgnoreCase(valuesLocale[i], query)) { + if (found >= start && v.size() < limit) { + v.add(new Choice(null, valuesLocale[i], labelsLocale[i])); + if (valuesLocale[i].equalsIgnoreCase(query)) { + dflt = i; + } + } + found++; } } - return new Choices(v, 0, v.length, Choices.CF_AMBIGUOUS, false, dflt); + Choice[] vArray = new Choice[v.size()]; + return new Choices(v.toArray(vArray), start, found, Choices.CF_AMBIGUOUS, false, dflt); } @Override - public Choices getBestMatch(String field, String text, Collection collection, String locale) { + public Choices getBestMatch(String text, String locale) { init(); - for (int i = 0; i < values.length; ++i) { - if (text.equalsIgnoreCase(values[i])) { + Locale currentLocale = I18nUtil.getSupportedLocale(locale); + String[] valuesLocale = values.get(currentLocale.getLanguage()); + String[] labelsLocale = labels.get(currentLocale.getLanguage()); + for (int i = 0; i < valuesLocale.length; ++i) { + if (text.equalsIgnoreCase(valuesLocale[i])) { Choice v[] = new Choice[1]; - v[0] = new Choice(String.valueOf(i), values[i], labels[i]); + v[0] = new Choice(String.valueOf(i), valuesLocale[i], labelsLocale[i]); return new Choices(v, 0, v.length, Choices.CF_UNCERTAIN, false, 0); } } @@ -131,19 +187,31 @@ public class DCInputAuthority extends SelfNamedPlugin implements ChoiceAuthority } @Override - public String getLabel(String field, String key, String locale) { + public String getLabel(String key, String locale) { init(); + + // Get default if locale is empty + if (StringUtils.isBlank(locale)) { + locale = I18nUtil.getDefaultLocale().getLanguage(); + } + + String[] labelsLocale = labels.get(locale); int pos = -1; - for (int i = 0; i < values.length; i++) { - if (values[i].equals(key)) { + for (int i = 0; i < labelsLocale.length; i++) { + if (labelsLocale[i].equals(key)) { pos = i; break; } } if (pos != -1) { - return labels[pos]; + return labelsLocale[pos]; } else { return "UNKNOWN KEY " + key; } } + + @Override + public boolean isScrollable() { + return true; + } } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabulary.java b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabulary.java index 097a19eb13..00c74bea9d 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabulary.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabulary.java @@ -10,7 +10,9 @@ package org.dspace.content.authority; import java.io.File; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; import java.util.List; +import java.util.Map; import javax.xml.xpath.XPath; import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathExpressionException; @@ -19,7 +21,6 @@ import javax.xml.xpath.XPathFactory; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; -import org.dspace.content.Collection; import org.dspace.core.SelfNamedPlugin; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; @@ -54,25 +55,35 @@ import org.xml.sax.InputSource; * @author Michael B. 
Klein */ -public class DSpaceControlledVocabulary extends SelfNamedPlugin implements ChoiceAuthority { +public class DSpaceControlledVocabulary extends SelfNamedPlugin implements HierarchicalAuthority { private static Logger log = org.apache.logging.log4j.LogManager.getLogger(DSpaceControlledVocabulary.class); protected static String xpathTemplate = "//node[contains(translate(@label,'ABCDEFGHIJKLMNOPQRSTUVWXYZ'," + "'abcdefghijklmnopqrstuvwxyz'),'%s')]"; protected static String idTemplate = "//node[@id = '%s']"; - protected static String idParentTemplate = "//node[@id = '%s']/parent::isComposedBy"; + protected static String labelTemplate = "//node[@label = '%s']"; + protected static String idParentTemplate = "//node[@id = '%s']/parent::isComposedBy/parent::node"; + protected static String rootTemplate = "/node"; protected static String pluginNames[] = null; protected String vocabularyName = null; protected InputSource vocabulary = null; - protected Boolean suggestHierarchy = true; + protected Boolean suggestHierarchy = false; protected Boolean storeHierarchy = true; protected String hierarchyDelimiter = "::"; + protected Integer preloadLevel = 1; public DSpaceControlledVocabulary() { super(); } + @Override + public boolean storeAuthorityInMetadata() { + // For backward compatibility controlled vocabularies don't store the node id in + // the metadatavalue + return false; + } + public static String[] getPluginNames() { if (pluginNames == null) { initPluginNames(); @@ -112,6 +123,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Choic String configurationPrefix = "vocabulary.plugin." + vocabularyName; storeHierarchy = config.getBooleanProperty(configurationPrefix + ".hierarchy.store", storeHierarchy); suggestHierarchy = config.getBooleanProperty(configurationPrefix + ".hierarchy.suggest", suggestHierarchy); + preloadLevel = config.getIntProperty(configurationPrefix + ".hierarchy.preloadLevel", preloadLevel); String configuredDelimiter = config.getProperty(configurationPrefix + ".delimiter"); if (configuredDelimiter != null) { hierarchyDelimiter = configuredDelimiter.replaceAll("(^\"|\"$)", ""); @@ -142,7 +154,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Choic } @Override - public Choices getMatches(String field, String text, Collection collection, int start, int limit, String locale) { + public Choices getMatches(String text, int start, int limit, String locale) { init(); log.debug("Getting matches for '" + text + "'"); String xpathExpression = ""; @@ -151,59 +163,60 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Choic xpathExpression += String.format(xpathTemplate, textHierarchy[i].replaceAll("'", "'").toLowerCase()); } XPath xpath = XPathFactory.newInstance().newXPath(); - Choice[] choices; + int total = 0; + List choices = new ArrayList(); try { NodeList results = (NodeList) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODESET); - String[] authorities = new String[results.getLength()]; - String[] values = new String[results.getLength()]; - String[] labels = new String[results.getLength()]; - String[] parent = new String[results.getLength()]; - String[] notes = new String[results.getLength()]; - for (int i = 0; i < results.getLength(); i++) { - Node node = results.item(i); - readNode(authorities, values, labels, parent, notes, i, node); - } - int resultCount = labels.length - start; - // limit = 0 means no limit - if ((limit > 0) && (resultCount > limit)) { - resultCount = limit; - 
} - choices = new Choice[resultCount]; - if (resultCount > 0) { - for (int i = 0; i < resultCount; i++) { - choices[i] = new Choice(authorities[start + i], values[start + i], labels[start + i]); - if (StringUtils.isNotBlank(parent[i])) { - choices[i].extras.put("parent", parent[i]); - } - if (StringUtils.isNotBlank(notes[i])) { - choices[i].extras.put("note", notes[i]); - } - } - } + total = results.getLength(); + choices = getChoicesFromNodeList(results, start, limit); } catch (XPathExpressionException e) { - choices = new Choice[0]; + log.warn(e.getMessage(), e); + return new Choices(true); } - return new Choices(choices, 0, choices.length, Choices.CF_AMBIGUOUS, false); + return new Choices(choices.toArray(new Choice[choices.size()]), start, total, Choices.CF_AMBIGUOUS, + total > start + limit); } @Override - public Choices getBestMatch(String field, String text, Collection collection, String locale) { + public Choices getBestMatch(String text, String locale) { init(); - log.debug("Getting best match for '" + text + "'"); - return getMatches(field, text, collection, 0, 2, locale); - } - - @Override - public String getLabel(String field, String key, String locale) { - init(); - String xpathExpression = String.format(idTemplate, key); + log.debug("Getting best matches for '" + text + "'"); + String xpathExpression = ""; + String[] textHierarchy = text.split(hierarchyDelimiter, -1); + for (int i = 0; i < textHierarchy.length; i++) { + xpathExpression += String.format(labelTemplate, textHierarchy[i].replaceAll("'", "'")); + } XPath xpath = XPathFactory.newInstance().newXPath(); + List choices = new ArrayList(); try { - Node node = (Node) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODE); - return node.getAttributes().getNamedItem("label").getNodeValue(); + NodeList results = (NodeList) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODESET); + choices = getChoicesFromNodeList(results, 0, 1); } catch (XPathExpressionException e) { - return (""); + log.warn(e.getMessage(), e); + return new Choices(true); } + return new Choices(choices.toArray(new Choice[choices.size()]), 0, choices.size(), Choices.CF_AMBIGUOUS, false); + } + + @Override + public String getLabel(String key, String locale) { + return getNodeLabel(key, this.suggestHierarchy); + } + + @Override + public String getValue(String key, String locale) { + return getNodeLabel(key, this.storeHierarchy); + } + + @Override + public Choice getChoice(String authKey, String locale) { + Node node; + try { + node = getNode(authKey); + } catch (XPathExpressionException e) { + return null; + } + return createChoiceFromNode(node); } @Override @@ -212,81 +225,227 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Choic } @Override - public Choice getChoice(String fieldKey, String authKey, String locale) { + public Choices getTopChoices(String authorityName, int start, int limit, String locale) { init(); - log.debug("Getting matches for '" + authKey + "'"); - String xpathExpression = String.format(idTemplate, authKey); - XPath xpath = XPathFactory.newInstance().newXPath(); - try { - Node node = (Node) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODE); - if (node != null) { - String[] authorities = new String[1]; - String[] values = new String[1]; - String[] labels = new String[1]; - String[] parent = new String[1]; - String[] note = new String[1]; - readNode(authorities, values, labels, parent, note, 0, node); - - if (values.length > 0) { - Choice choice = new Choice(authorities[0], 
values[0], labels[0]); - if (StringUtils.isNotBlank(parent[0])) { - choice.extras.put("parent", parent[0]); - } - if (StringUtils.isNotBlank(note[0])) { - choice.extras.put("note", note[0]); - } - return choice; - } - } - } catch (XPathExpressionException e) { - log.warn(e.getMessage(), e); - } - return null; + String xpathExpression = rootTemplate; + return getChoicesByXpath(xpathExpression, start, limit); } - private void readNode(String[] authorities, String[] values, String[] labels, String[] parent, String[] notes, - int i, Node node) { + @Override + public Choices getChoicesByParent(String authorityName, String parentId, int start, int limit, String locale) { + init(); + String xpathExpression = String.format(idTemplate, parentId); + return getChoicesByXpath(xpathExpression, start, limit); + } + + @Override + public Choice getParentChoice(String authorityName, String childId, String locale) { + init(); + try { + String xpathExpression = String.format(idParentTemplate, childId); + Choice choice = createChoiceFromNode(getNodeFromXPath(xpathExpression)); + return choice; + } catch (XPathExpressionException e) { + log.error(e.getMessage(), e); + return null; + } + } + + @Override + public Integer getPreloadLevel() { + return preloadLevel; + } + + private boolean isRootElement(Node node) { + if (node != null && node.getOwnerDocument().getDocumentElement().equals(node)) { + return true; + } + return false; + } + + private Node getNode(String key) throws XPathExpressionException { + init(); + String xpathExpression = String.format(idTemplate, key); + Node node = getNodeFromXPath(xpathExpression); + return node; + } + + private Node getNodeFromXPath(String xpathExpression) throws XPathExpressionException { + XPath xpath = XPathFactory.newInstance().newXPath(); + Node node = (Node) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODE); + return node; + } + + private List getChoicesFromNodeList(NodeList results, int start, int limit) { + List choices = new ArrayList(); + for (int i = 0; i < results.getLength(); i++) { + if (i < start) { + continue; + } + if (choices.size() == limit) { + break; + } + Node node = results.item(i); + Choice choice = new Choice(getAuthority(node), getLabel(node), getValue(node), + isSelectable(node)); + choice.extras = addOtherInformation(getParent(node), getNote(node), getChildren(node), getAuthority(node)); + choices.add(choice); + } + return choices; + } + + private Map addOtherInformation(String parentCurr, String noteCurr, + List childrenCurr, String authorityCurr) { + Map extras = new HashMap(); + if (StringUtils.isNotBlank(parentCurr)) { + extras.put("parent", parentCurr); + } + if (StringUtils.isNotBlank(noteCurr)) { + extras.put("note", noteCurr); + } + if (childrenCurr.size() > 0) { + extras.put("hasChildren", "true"); + } else { + extras.put("hasChildren", "false"); + } + extras.put("id", authorityCurr); + return extras; + } + + private String getNodeLabel(String key, boolean useHierarchy) { + try { + Node node = getNode(key); + if (useHierarchy) { + return this.buildString(node); + } else { + return node.getAttributes().getNamedItem("label").getNodeValue(); + } + } catch (XPathExpressionException e) { + return (""); + } + } + + private String getLabel(Node node) { String hierarchy = this.buildString(node); if (this.suggestHierarchy) { - labels[i] = hierarchy; + return hierarchy; } else { - labels[i] = node.getAttributes().getNamedItem("label").getNodeValue(); - } - if (this.storeHierarchy) { - values[i] = hierarchy; - } else { - values[i] = 
node.getAttributes().getNamedItem("label").getNodeValue(); + return node.getAttributes().getNamedItem("label").getNodeValue(); } + } + private String getValue(Node node) { + String hierarchy = this.buildString(node); + if (this.storeHierarchy) { + return hierarchy; + } else { + return node.getAttributes().getNamedItem("label").getNodeValue(); + } + } + + private String getNote(Node node) { NodeList childNodes = node.getChildNodes(); for (int ci = 0; ci < childNodes.getLength(); ci++) { Node firstChild = childNodes.item(ci); if (firstChild != null && "hasNote".equals(firstChild.getNodeName())) { String nodeValue = firstChild.getTextContent(); if (StringUtils.isNotBlank(nodeValue)) { - notes[i] = nodeValue; + return nodeValue; } } } - Node idAttr = node.getAttributes().getNamedItem("id"); - if (null != idAttr) { // 'id' is optional - authorities[i] = idAttr.getNodeValue(); - if (isHierarchical()) { - Node parentN = node.getParentNode(); - if (parentN != null) { - parentN = parentN.getParentNode(); - if (parentN != null) { - Node parentIdAttr = parentN.getAttributes().getNamedItem("id"); - if (null != parentIdAttr) { - parent[i] = parentIdAttr.getNodeValue(); + return null; + } + + private List getChildren(Node node) { + List children = new ArrayList(); + NodeList childNodes = node.getChildNodes(); + for (int ci = 0; ci < childNodes.getLength(); ci++) { + Node firstChild = childNodes.item(ci); + if (firstChild != null && "isComposedBy".equals(firstChild.getNodeName())) { + for (int cii = 0; cii < firstChild.getChildNodes().getLength(); cii++) { + Node childN = firstChild.getChildNodes().item(cii); + if (childN != null && "node".equals(childN.getNodeName())) { + Node childIdAttr = childN.getAttributes().getNamedItem("id"); + if (null != childIdAttr) { + children.add(childIdAttr.getNodeValue()); } } } + break; } - } else { - authorities[i] = null; - parent[i] = null; + } + return children; + } + + private boolean isSelectable(Node node) { + Node selectableAttr = node.getAttributes().getNamedItem("selectable"); + if (null != selectableAttr) { + return Boolean.valueOf(selectableAttr.getNodeValue()); + } else { // Default is true + return true; } } + private String getParent(Node node) { + Node parentN = node.getParentNode(); + if (parentN != null) { + parentN = parentN.getParentNode(); + if (parentN != null && !isRootElement(parentN)) { + return buildString(parentN); + } + } + return null; + } + + private String getAuthority(Node node) { + Node idAttr = node.getAttributes().getNamedItem("id"); + if (null != idAttr) { // 'id' is optional + return idAttr.getNodeValue(); + } else { + return null; + } + } + + private Choices getChoicesByXpath(String xpathExpression, int start, int limit) { + List choices = new ArrayList(); + XPath xpath = XPathFactory.newInstance().newXPath(); + try { + Node parentNode = (Node) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODE); + int count = 0; + if (parentNode != null) { + NodeList childNodes = (NodeList) xpath.evaluate(".//isComposedBy", parentNode, XPathConstants.NODE); + if (null != childNodes) { + for (int i = 0; i < childNodes.getLength(); i++) { + Node childNode = childNodes.item(i); + if (childNode != null && "node".equals(childNode.getNodeName())) { + if (count < start || choices.size() >= limit) { + count++; + continue; + } + count++; + choices.add(createChoiceFromNode(childNode)); + } + } + } + return new Choices(choices.toArray(new Choice[choices.size()]), start, count, + Choices.CF_AMBIGUOUS, false); + } + } catch (XPathExpressionException 
e) { + log.warn(e.getMessage(), e); + return new Choices(true); + } + return new Choices(false); + } + + private Choice createChoiceFromNode(Node node) { + if (node != null && !isRootElement(node)) { + Choice choice = new Choice(getAuthority(node), getLabel(node), getValue(node), + isSelectable(node)); + choice.extras = addOtherInformation(getParent(node), getNote(node),getChildren(node), getAuthority(node)); + return choice; + } + return null; + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/HierarchicalAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/HierarchicalAuthority.java new file mode 100644 index 0000000000..c25b74d354 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/authority/HierarchicalAuthority.java @@ -0,0 +1,85 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.authority; + +/** + * Plugin interface that supplies an authority control mechanism for + * one metadata field. + * + * @author Larry Stone + * @see ChoiceAuthority + */ +public interface HierarchicalAuthority extends ChoiceAuthority { + + /** + * Get all values from the authority that match the preferred value. + * Note that the offering was entered by the user and may contain + * mixed/incorrect case, whitespace, etc so the plugin should be careful + * to clean up user data before making comparisons. + * + * Value of a "Name" field will be in canonical DSpace person name format, + * which is "Lastname, Firstname(s)", e.g. "Smith, John Q.". + * + * Some authorities with a small set of values may simply return the whole + * set for any sample value, although it's a good idea to set the + * defaultSelected index in the Choices instance to the choice, if any, + * that matches the value. + * + * @param authorityName authority name + * @param start choice at which to start, 0 is first. + * @param limit maximum number of choices to return, 0 for no limit. + * @param locale explicit localization key if available, or null + * @return a Choices object (never null). + */ + public Choices getTopChoices(String authorityName, int start, int limit, String locale); + + /** + * Get all values from the authority that match the preferred value. + * Note that the offering was entered by the user and may contain + * mixed/incorrect case, whitespace, etc so the plugin should be careful + * to clean up user data before making comparisons. + * + * Value of a "Name" field will be in canonical DSpace person name format, + * which is "Lastname, Firstname(s)", e.g. "Smith, John Q.". + * + * Some authorities with a small set of values may simply return the whole + * set for any sample value, although it's a good idea to set the + * defaultSelected index in the Choices instance to the choice, if any, + * that matches the value. + * + * @param authorityName authority name + * @param parentId user's value to match + * @param start choice at which to start, 0 is first. + * @param limit maximum number of choices to return, 0 for no limit. + * @param locale explicit localization key if available, or null + * @return a Choices object (never null). 
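// A minimal, self-contained sketch of the start/limit windowing used by getChoicesFromNodeList(...)
// and getChoicesByXpath(...) above: walk the XPath result list, skip the first `start` hits and stop
// once `limit` entries have been collected. The VocabularyEntry holder and the inline XML are
// illustrative stand-ins for DSpace's Choice class and controlled-vocabulary files, not part of this patch.
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

public class NodeListPagingSketch {

    static class VocabularyEntry {
        final String id;
        final String label;

        VocabularyEntry(String id, String label) {
            this.id = id;
            this.label = label;
        }
    }

    public static void main(String[] args) throws Exception {
        String xml = "<node id='root' label='Root'><isComposedBy>"
            + "<node id='a' label='Alpha'/><node id='b' label='Beta'/><node id='c' label='Gamma'/>"
            + "</isComposedBy></node>";
        Document vocabulary = DocumentBuilderFactory.newInstance().newDocumentBuilder()
            .parse(new InputSource(new StringReader(xml)));
        XPath xpath = XPathFactory.newInstance().newXPath();
        NodeList results = (NodeList) xpath.evaluate("//isComposedBy/node", vocabulary, XPathConstants.NODESET);

        int start = 1;   // skip the first match
        int limit = 2;   // return at most two matches
        List<VocabularyEntry> page = new ArrayList<>();
        for (int i = 0; i < results.getLength(); i++) {
            if (i < start) {
                continue;                 // before the requested window
            }
            if (page.size() == limit) {
                break;                    // window is full
            }
            Node node = results.item(i);
            page.add(new VocabularyEntry(
                node.getAttributes().getNamedItem("id").getNodeValue(),
                node.getAttributes().getNamedItem("label").getNodeValue()));
        }
        page.forEach(entry -> System.out.println(entry.id + " -> " + entry.label)); // prints b and c
    }
}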
+ */ + public Choices getChoicesByParent(String authorityName, String parentId, int start, int limit, String locale); + + /** + * It returns the parent choice in the hierarchy if any + * + * @param authorityName authority name + * @param vocabularyId user's value to match + * @param locale explicit localization key if available, or null + * @return a Choice object + */ + public Choice getParentChoice(String authorityName, String vocabularyId, String locale); + + /** + * Provides an hint for the UI to preload some levels to improve the UX. It + * usually mean that these preloaded level will be shown expanded by default + */ + public Integer getPreloadLevel(); + + @Override + default boolean isHierarchical() { + return true; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/content/authority/InputFormSelfRegisterWrapperAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/InputFormSelfRegisterWrapperAuthority.java deleted file mode 100644 index 8716ef38b9..0000000000 --- a/dspace-api/src/main/java/org/dspace/content/authority/InputFormSelfRegisterWrapperAuthority.java +++ /dev/null @@ -1,166 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.content.authority; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.lang3.StringUtils; -import org.apache.logging.log4j.Logger; -import org.dspace.app.util.DCInputsReader; -import org.dspace.app.util.DCInputsReaderException; -import org.dspace.content.Collection; - -/** - * This authority is registered automatically by the ChoiceAuthorityService for - * all the metadata that use a value-pair or a vocabulary in the submission-form.xml - * - * It keeps a map of form-name vs ChoiceAuthority to delegate the execution of - * the method to the specific ChoiceAuthority configured for the collection when - * the same metadata have different vocabulary or value-pair on a collection - * basis - * - * @author Andrea Bollini (andrea.bollini at 4science.it) - */ -public class InputFormSelfRegisterWrapperAuthority implements ChoiceAuthority { - - private static Logger log = - org.apache.logging.log4j.LogManager.getLogger(InputFormSelfRegisterWrapperAuthority.class); - - private Map delegates = new HashMap(); - - private static DCInputsReader dci = null; - - private void init() { - try { - if (dci == null) { - dci = new DCInputsReader(); - } - } catch (DCInputsReaderException e) { - log.error("Failed reading DCInputs initialization: ", e); - } - } - - @Override - public Choices getMatches(String field, String query, Collection collection, int start, int limit, String locale) { - String formName; - try { - init(); - if (collection == null) { - Set choices = new HashSet(); - //workaround search in all authority configured - for (ChoiceAuthority ca : delegates.values()) { - Choices tmp = ca.getMatches(field, query, null, start, limit, locale); - if (tmp.total > 0) { - Set mySet = new HashSet(Arrays.asList(tmp.values)); - choices.addAll(mySet); - } - } - if (!choices.isEmpty()) { - Choice[] results = new Choice[choices.size()]; - choices.toArray(results); - return new Choices(results, 0, choices.size(), Choices.CF_AMBIGUOUS, false); - } - } else { - formName = dci.getInputFormNameByCollectionAndField(collection, 
field); - return delegates.get(formName).getMatches(field, query, collection, start, limit, locale); - } - } catch (DCInputsReaderException e) { - log.error(e.getMessage(), e); - } - return new Choices(Choices.CF_NOTFOUND); - } - - @Override - public Choices getBestMatch(String field, String text, Collection collection, String locale) { - String formName; - try { - init(); - if (collection == null) { - Set choices = new HashSet(); - //workaround search in all authority configured - for (ChoiceAuthority ca : delegates.values()) { - Choices tmp = ca.getBestMatch(field, text, null, locale); - if (tmp.total > 0) { - Set mySet = new HashSet(Arrays.asList(tmp.values)); - choices.addAll(mySet); - } - } - if (!choices.isEmpty()) { - Choice[] results = new Choice[choices.size() - 1]; - choices.toArray(results); - return new Choices(results, 0, choices.size(), Choices.CF_UNCERTAIN, false); - } - } else { - formName = dci.getInputFormNameByCollectionAndField(collection, field); - return delegates.get(formName).getBestMatch(field, text, collection, locale); - } - } catch (DCInputsReaderException e) { - log.error(e.getMessage(), e); - } - return new Choices(Choices.CF_NOTFOUND); - } - - @Override - public String getLabel(String field, String key, String locale) { - // TODO we need to manage REALLY the authority - // WRONG BEHAVIOUR: now in each delegates can exists the same key with - // different value - for (ChoiceAuthority delegate : delegates.values()) { - String label = delegate.getLabel(field, key, locale); - if (StringUtils.isNotBlank(label)) { - return label; - } - } - return "UNKNOWN KEY " + key; - } - - @Override - public boolean isHierarchical() { - // TODO we need to manage REALLY the authority - // WRONG BEHAVIOUR: now in each delegates can exists the same key with - // different value - for (ChoiceAuthority delegate : delegates.values()) { - return delegate.isHierarchical(); - } - return false; - } - - @Override - public boolean isScrollable() { - // TODO we need to manage REALLY the authority - // WRONG BEHAVIOUR: now in each delegates can exists the same key with - // different value - for (ChoiceAuthority delegate : delegates.values()) { - return delegate.isScrollable(); - } - return false; - } - - @Override - public boolean hasIdentifier() { - // TODO we need to manage REALLY the authority - // WRONG BEHAVIOUR: now in each delegates can exists the same key with - // different value - for (ChoiceAuthority delegate : delegates.values()) { - return delegate.hasIdentifier(); - } - return false; - } - - public Map getDelegates() { - return delegates; - } - - public void setDelegates(Map delegates) { - this.delegates = delegates; - } -} diff --git a/dspace-api/src/main/java/org/dspace/content/authority/MetadataAuthorityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/authority/MetadataAuthorityServiceImpl.java index 6a5b17a029..c542c6a89e 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/MetadataAuthorityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/MetadataAuthorityServiceImpl.java @@ -14,12 +14,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; -import org.dspace.app.util.DCInput; -import org.dspace.app.util.DCInputSet; -import org.dspace.app.util.DCInputsReader; -import org.dspace.app.util.DCInputsReaderException; import org.dspace.content.MetadataField; import 
org.dspace.content.authority.service.MetadataAuthorityService; import org.dspace.content.service.MetadataFieldService; @@ -144,8 +139,6 @@ public class MetadataAuthorityServiceImpl implements MetadataAuthorityService { if (dmc >= Choices.CF_UNSET) { defaultMinConfidence = dmc; } - - autoRegisterAuthorityFromInputReader(); } } @@ -205,7 +198,6 @@ public class MetadataAuthorityServiceImpl implements MetadataAuthorityService { } } - /** * Give the minimal level of confidence required to consider valid an authority value * for the given metadata. @@ -229,35 +221,4 @@ public class MetadataAuthorityServiceImpl implements MetadataAuthorityService { } return copy; } - - - private void autoRegisterAuthorityFromInputReader() { - try { - DCInputsReader dcInputsReader = new DCInputsReader(); - for (DCInputSet dcinputSet : dcInputsReader.getAllInputs(Integer.MAX_VALUE, 0)) { - DCInput[][] dcinputs = dcinputSet.getFields(); - for (DCInput[] dcrows : dcinputs) { - for (DCInput dcinput : dcrows) { - if (StringUtils.isNotBlank(dcinput.getPairsType()) - || StringUtils.isNotBlank(dcinput.getVocabulary())) { - String authorityName = dcinput.getPairsType(); - if (StringUtils.isBlank(authorityName)) { - authorityName = dcinput.getVocabulary(); - } - if (!StringUtils.equals(dcinput.getInputType(), "qualdrop_value")) { - String fieldKey = makeFieldKey(dcinput.getSchema(), dcinput.getElement(), - dcinput.getQualifier()); - boolean req = ConfigurationManager - .getBooleanProperty("authority.required." + fieldKey, false); - controlled.put(fieldKey, true); - isAuthorityRequired.put(fieldKey, req); - } - } - } - } - } - } catch (DCInputsReaderException e) { - throw new IllegalStateException(e.getMessage(), e); - } - } } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/SampleAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/SampleAuthority.java index 8197f180af..e6cc9b9d44 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/SampleAuthority.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/SampleAuthority.java @@ -7,13 +7,13 @@ */ package org.dspace.content.authority; -import org.dspace.content.Collection; - /** * This is a *very* stupid test fixture for authority control, and also * serves as a trivial example of an authority plugin implementation. 
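// A sketch of the slimmed-down plugin contract that SampleAuthority (below) and TestAuthority now
// implement: lookups no longer receive the metadata field or Collection, and every plugin carries the
// instance name assigned to it by the PluginService. The local NameAwareChoiceAuthority interface and
// the weekday data are stand-ins so the snippet compiles on its own; the real contracts are
// org.dspace.content.authority.ChoiceAuthority and org.dspace.core.NameAwarePlugin.
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class WeekdayAuthoritySketch {

    interface NameAwareChoiceAuthority {
        List<String> getMatches(String query, int start, int limit, String locale);
        String getLabel(String key, String locale);
        void setPluginInstanceName(String name);
        String getPluginInstanceName();
    }

    static class WeekdayAuthority implements NameAwareChoiceAuthority {
        private final List<String> values = Arrays.asList("sun", "mon", "tue", "wed", "thu", "fri", "sat");
        private String pluginInstanceName;

        @Override
        public List<String> getMatches(String query, int start, int limit, String locale) {
            // naive prefix match; real plugins would also report authority keys and confidence
            return values.stream()
                .filter(v -> v.startsWith(query.toLowerCase()))
                .skip(start)
                .limit(limit > 0 ? limit : Long.MAX_VALUE)
                .collect(Collectors.toList());
        }

        @Override
        public String getLabel(String key, String locale) {
            return values.get(Integer.parseInt(key));
        }

        @Override
        public void setPluginInstanceName(String name) {
            this.pluginInstanceName = name;
        }

        @Override
        public String getPluginInstanceName() {
            return pluginInstanceName;
        }
    }

    public static void main(String[] args) {
        WeekdayAuthority authority = new WeekdayAuthority();
        authority.setPluginInstanceName("weekdays");
        System.out.println(authority.getMatches("t", 0, 10, null)); // [tue, thu]
    }
}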
*/ public class SampleAuthority implements ChoiceAuthority { + private String pluginInstanceName; + protected static String values[] = { "sun", "mon", @@ -35,7 +35,7 @@ public class SampleAuthority implements ChoiceAuthority { }; @Override - public Choices getMatches(String field, String query, Collection collection, int start, int limit, String locale) { + public Choices getMatches(String query, int start, int limit, String locale) { int dflt = -1; Choice v[] = new Choice[values.length]; for (int i = 0; i < values.length; ++i) { @@ -48,7 +48,7 @@ public class SampleAuthority implements ChoiceAuthority { } @Override - public Choices getBestMatch(String field, String text, Collection collection, String locale) { + public Choices getBestMatch(String text, String locale) { for (int i = 0; i < values.length; ++i) { if (text.equalsIgnoreCase(values[i])) { Choice v[] = new Choice[1]; @@ -60,7 +60,17 @@ public class SampleAuthority implements ChoiceAuthority { } @Override - public String getLabel(String field, String key, String locale) { + public String getLabel(String key, String locale) { return labels[Integer.parseInt(key)]; } + + @Override + public String getPluginInstanceName() { + return pluginInstanceName; + } + + @Override + public void setPluginInstanceName(String name) { + this.pluginInstanceName = name; + } } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java index 5e913430b7..c93e6db786 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java @@ -11,6 +11,7 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; @@ -24,8 +25,9 @@ import org.dspace.authority.AuthorityValue; import org.dspace.authority.SolrAuthorityInterface; import org.dspace.authority.factory.AuthorityServiceFactory; import org.dspace.authority.service.AuthorityValueService; -import org.dspace.content.Collection; import org.dspace.core.ConfigurationManager; +import org.dspace.core.NameAwarePlugin; +import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; /** @@ -35,7 +37,14 @@ import org.dspace.services.factory.DSpaceServicesFactory; * @author Mark Diggory (markd at atmire dot com) */ public class SolrAuthority implements ChoiceAuthority { + /** the name assigned to the specific instance by the PluginService, @see {@link NameAwarePlugin} **/ + private String authorityName; + /** + * the metadata managed by the plugin instance, derived from its authority name + * in the form schema_element_qualifier + */ + private String field; protected SolrAuthorityInterface source = DSpaceServicesFactory.getInstance().getServiceManager() .getServiceByName("AuthoritySource", SolrAuthorityInterface.class); @@ -45,8 +54,9 @@ public class SolrAuthority implements ChoiceAuthority { protected boolean externalResults = false; protected final AuthorityValueService authorityValueService = AuthorityServiceFactory.getInstance() .getAuthorityValueService(); - - public Choices getMatches(String field, String text, Collection collection, int start, int limit, String locale, + protected final ConfigurationService configurationService = DSpaceServicesFactory.getInstance() + .getConfigurationService(); + public Choices 
getMatches(String text, int start, int limit, String locale, boolean bestMatch) { if (limit == 0) { limit = 10; @@ -193,13 +203,13 @@ public class SolrAuthority implements ChoiceAuthority { } @Override - public Choices getMatches(String field, String text, Collection collection, int start, int limit, String locale) { - return getMatches(field, text, collection, start, limit, locale, true); + public Choices getMatches(String text, int start, int limit, String locale) { + return getMatches(text, start, limit, locale, true); } @Override - public Choices getBestMatch(String field, String text, Collection collection, String locale) { - Choices matches = getMatches(field, text, collection, 0, 1, locale, false); + public Choices getBestMatch(String text, String locale) { + Choices matches = getMatches(text, 0, 1, locale, false); if (matches.values.length != 0 && !matches.values[0].value.equalsIgnoreCase(text)) { matches = new Choices(false); } @@ -207,7 +217,7 @@ public class SolrAuthority implements ChoiceAuthority { } @Override - public String getLabel(String field, String key, String locale) { + public String getLabel(String key, String locale) { try { if (log.isDebugEnabled()) { log.debug("requesting label for key " + key + " using locale " + locale); @@ -276,4 +286,23 @@ public class SolrAuthority implements ChoiceAuthority { public void addExternalResultsInNextMatches() { this.externalResults = true; } + + @Override + public void setPluginInstanceName(String name) { + authorityName = name; + for (Entry conf : configurationService.getProperties().entrySet()) { + if (StringUtils.startsWith((String) conf.getKey(), ChoiceAuthorityServiceImpl.CHOICES_PLUGIN_PREFIX) + && StringUtils.equals((String) conf.getValue(), authorityName)) { + field = ((String) conf.getKey()).substring(ChoiceAuthorityServiceImpl.CHOICES_PLUGIN_PREFIX.length()) + .replace(".", "_"); + // exit the look immediately as we have found it + break; + } + } + } + + @Override + public String getPluginInstanceName() { + return authorityName; + } } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/TestAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/TestAuthority.java index a017e8fe28..15c000e978 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/TestAuthority.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/TestAuthority.java @@ -11,7 +11,6 @@ import java.util.ArrayList; import java.util.List; import org.apache.commons.lang3.StringUtils; -import org.dspace.content.Collection; /** * This is a *very* stupid test fixture for authority control with AuthorityVariantsSupport. 
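// A standalone sketch of the lookup performed by setPluginInstanceName(...) above: scan configuration
// entries for a choices-plugin key whose value names this authority, then derive the field key by
// stripping the prefix and replacing dots with underscores. Plain java.util.Properties stands in for
// DSpace's ConfigurationService here, and the prefix literal is assumed purely for the demo.
import java.util.Map;
import java.util.Properties;

public class AuthorityFieldLookupSketch {

    private static final String CHOICES_PLUGIN_PREFIX = "choices.plugin.";

    static String fieldForAuthority(Properties config, String authorityName) {
        for (Map.Entry<Object, Object> conf : config.entrySet()) {
            String key = (String) conf.getKey();
            String value = (String) conf.getValue();
            if (key.startsWith(CHOICES_PLUGIN_PREFIX) && value.equals(authorityName)) {
                // exit the loop as soon as the owning field is found
                return key.substring(CHOICES_PLUGIN_PREFIX.length()).replace(".", "_");
            }
        }
        return null;
    }

    public static void main(String[] args) {
        Properties config = new Properties();
        config.setProperty("choices.plugin.dc.contributor.author", "SolrAuthorAuthority");
        System.out.println(fieldForAuthority(config, "SolrAuthorAuthority")); // dc_contributor_author
    }
}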
@@ -19,6 +18,7 @@ import org.dspace.content.Collection; * @author Andrea Bollini (CILEA) */ public class TestAuthority implements ChoiceAuthority, AuthorityVariantsSupport { + private String pluginInstanceName; @Override public List getVariants(String key, String locale) { @@ -33,8 +33,7 @@ public class TestAuthority implements ChoiceAuthority, AuthorityVariantsSupport } @Override - public Choices getMatches(String field, String text, Collection collection, - int start, int limit, String locale) { + public Choices getMatches(String text, int start, int limit, String locale) { Choices choices = new Choices(false); if (StringUtils.isNotBlank(text)) { @@ -52,8 +51,7 @@ public class TestAuthority implements ChoiceAuthority, AuthorityVariantsSupport } @Override - public Choices getBestMatch(String field, String text, Collection collection, - String locale) { + public Choices getBestMatch(String text, String locale) { Choices choices = new Choices(false); if (StringUtils.isNotBlank(text)) { @@ -70,10 +68,20 @@ public class TestAuthority implements ChoiceAuthority, AuthorityVariantsSupport } @Override - public String getLabel(String field, String key, String locale) { + public String getLabel(String key, String locale) { if (StringUtils.isNotBlank(key)) { return key.replaceAll("authority", "label"); } return "Unknown"; } + + @Override + public String getPluginInstanceName() { + return pluginInstanceName; + } + + @Override + public void setPluginInstanceName(String name) { + this.pluginInstanceName = name; + } } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/service/ChoiceAuthorityService.java b/dspace-api/src/main/java/org/dspace/content/authority/service/ChoiceAuthorityService.java index 83db9a734e..1cc5075d02 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/service/ChoiceAuthorityService.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/service/ChoiceAuthorityService.java @@ -48,10 +48,10 @@ public interface ChoiceAuthorityService { * @param element element of metadata field * @param qualifier qualifier of metadata field * @return the name of the choice authority associated with the specified - * metadata. Throw IllegalArgumentException if the supplied metadat + * metadata. Throw IllegalArgumentException if the supplied metadata * is not associated with an authority choice */ - public String getChoiceAuthorityName(String schema, String element, String qualifier); + public String getChoiceAuthorityName(String schema, String element, String qualifier, Collection collection); /** * Wrapper that calls getMatches method of the plugin corresponding to @@ -112,30 +112,33 @@ public interface ChoiceAuthorityService { * the metadata field defined by schema,element,qualifier. * * @param metadataValue metadata value + * @param collection Collection owner of Item * @param locale explicit localization key if available * @return label */ - public String getLabel(MetadataValue metadataValue, String locale); + public String getLabel(MetadataValue metadataValue, Collection collection, String locale); /** * Wrapper that calls getLabel method of the plugin corresponding to * the metadata field defined by single field key. 
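// A small illustration of how AuthorityVariantsSupport (kept by TestAuthority above) is typically
// consumed: a best-match lookup succeeds when the submitted text equals either the stored value or any
// of its variants. The hard-coded variant map is illustrative only; TestAuthority derives variants from
// the key, and production plugins read them from their backing source.
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Optional;

public class VariantMatchSketch {

    static Optional<String> bestMatch(Map<String, List<String>> variantsByValue, String text) {
        return variantsByValue.entrySet().stream()
            .filter(e -> e.getKey().equalsIgnoreCase(text)
                || e.getValue().stream().anyMatch(v -> v.equalsIgnoreCase(text)))
            .map(Map.Entry::getKey)
            .findFirst();
    }

    public static void main(String[] args) {
        Map<String, List<String>> variants = Map.of(
            "Smith, John", Arrays.asList("J. Smith", "Smith, J."));
        System.out.println(bestMatch(variants, "J. Smith")); // Optional[Smith, John]
        System.out.println(bestMatch(variants, "Jane Doe")); // Optional.empty
    }
}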
* * @param fieldKey single string identifying metadata field + * @param collection Collection owner of Item * @param locale explicit localization key if available * @param authKey authority key * @return label */ - public String getLabel(String fieldKey, String authKey, String locale); + public String getLabel(String fieldKey, Collection collection, String authKey, String locale); /** * Predicate, is there a Choices configuration of any kind for the * given metadata field? * * @param fieldKey single string identifying metadata field + * @param collection Collection owner of Item * @return true if choices are configured for this field. */ - public boolean isChoicesConfigured(String fieldKey); + public boolean isChoicesConfigured(String fieldKey, Collection collection); /** * Get the presentation keyword (should be "lookup", "select" or "suggest", but this @@ -160,12 +163,14 @@ public interface ChoiceAuthorityService { * @param metadataValue metadata value * @return List of variants */ - public List getVariants(MetadataValue metadataValue); - - public String getChoiceMetadatabyAuthorityName(String name); - - public Choice getChoice(String fieldKey, String authKey, String locale); + public List getVariants(MetadataValue metadataValue, Collection collection); + /** + * Return the ChoiceAuthority instance identified by the specified name + * + * @param authorityName the ChoiceAuthority instance name + * @return the ChoiceAuthority identified by the specified name + */ public ChoiceAuthority getChoiceAuthorityByAuthorityName(String authorityName); /** @@ -173,4 +178,49 @@ public interface ChoiceAuthorityService { */ public void clearCache(); + /** + * Should we store the authority key (if any) for such field key and collection? + * + * @param fieldKey single string identifying metadata field + * @param collection Collection owner of Item or where the item is submitted to + * @return true if the configuration allows to store the authority value + */ + public boolean storeAuthority(String fieldKey, Collection collection); + + /** + * Wrapper that calls getChoicesByParent method of the plugin. + * + * @param authorityName authority name + * @param parentId parent Id + * @param start choice at which to start, 0 is first. + * @param limit maximum number of choices to return, 0 for no limit. + * @param locale explicit localization key if available, or null + * @return a Choices object (never null). + * @see org.dspace.content.authority.ChoiceAuthority#getChoicesByParent(java.lang.String, java.lang.String, + * int, int, java.lang.String) + */ + public Choices getChoicesByParent(String authorityName, String parentId, int start, int limit, String locale); + + /** + * Wrapper that calls getTopChoices method of the plugin. + * + * @param authorityName authority name + * @param start choice at which to start, 0 is first. + * @param limit maximum number of choices to return, 0 for no limit. + * @param locale explicit localization key if available, or null + * @return a Choices object (never null). + * @see org.dspace.content.authority.ChoiceAuthority#getTopChoices(java.lang.String, int, int, java.lang.String) + */ + public Choices getTopChoices(String authorityName, int start, int limit, String locale); + + /** + * Return the direct parent of an entry identified by its id in an hierarchical + * authority. 
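// A toy, in-memory model of the navigation contract behind getTopChoices, getChoicesByParent and
// getParentChoice: page through the roots, expand a node's children on demand, and walk back up via the
// parent link. The Map-backed "vocabulary" and its ids are purely illustrative; the real service methods
// delegate to whichever HierarchicalAuthority plugin is configured for the field.
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class HierarchyNavigationSketch {

    private final Map<String, List<String>> children = new HashMap<>();
    private final Map<String, String> parent = new HashMap<>();

    void link(String parentId, String... childIds) {
        children.put(parentId, Arrays.asList(childIds));
        for (String child : childIds) {
            parent.put(child, parentId);
        }
    }

    List<String> getTopChoices(int start, int limit) {
        List<String> roots = children.getOrDefault("ROOT", Collections.emptyList());
        return roots.subList(Math.min(start, roots.size()), Math.min(start + limit, roots.size()));
    }

    List<String> getChoicesByParent(String parentId) {
        return children.getOrDefault(parentId, Collections.emptyList());
    }

    String getParentChoice(String childId) {
        return parent.get(childId);
    }

    public static void main(String[] args) {
        HierarchyNavigationSketch vocabulary = new HierarchyNavigationSketch();
        vocabulary.link("ROOT", "SCIENCES", "HUMANITIES");
        vocabulary.link("SCIENCES", "PHYSICS", "CHEMISTRY");

        System.out.println(vocabulary.getTopChoices(0, 10));           // [SCIENCES, HUMANITIES]
        System.out.println(vocabulary.getChoicesByParent("SCIENCES")); // [PHYSICS, CHEMISTRY]
        System.out.println(vocabulary.getParentChoice("PHYSICS"));     // SCIENCES
    }
}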
+ * + * @param authorityName authority name + * @param vocabularyId child id + * @param locale explicit localization key if available, or null + * @return the parent Choice object if any + */ + public Choice getParentChoice(String authorityName, String vocabularyId, String locale); } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/ItemDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/ItemDAO.java index 979f42836a..4c391d973b 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/ItemDAO.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/ItemDAO.java @@ -47,6 +47,19 @@ public interface ItemDAO extends DSpaceObjectLegacySupportDAO { public Iterator findBySubmitter(Context context, EPerson eperson) throws SQLException; + /** + * Find all the items by a given submitter. The order is + * indeterminate. All items are included. + * + * @param context DSpace context object + * @param eperson the submitter + * @param retrieveAllItems flag to determine if only archive should be returned + * @return an iterator over the items submitted by eperson + * @throws SQLException if database error + */ + public Iterator findBySubmitter(Context context, EPerson eperson, boolean retrieveAllItems) + throws SQLException; + public Iterator findBySubmitter(Context context, EPerson eperson, MetadataField metadataField, int limit) throws SQLException; diff --git a/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java index f20225a202..4ef26cffcb 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java @@ -13,6 +13,7 @@ import java.util.List; import org.dspace.core.Context; import org.dspace.core.GenericDAO; import org.dspace.scripts.Process; +import org.dspace.scripts.ProcessQueryParameterContainer; /** * This is the Data Access Object for the {@link Process} object @@ -54,4 +55,30 @@ public interface ProcessDAO extends GenericDAO { */ int countRows(Context context) throws SQLException; + /** + * Returns a list of all Processes in the database which match the given field requirements. If the + * requirements are not null, they will be combined with an AND operation. + * @param context The relevant DSpace context + * @param processQueryParameterContainer The {@link ProcessQueryParameterContainer} containing all the values + * that the returned {@link Process} objects must adhere to + * @param limit The limit for the amount of Processes returned + * @param offset The offset for the Processes to be returned + * @return The list of all Processes which match the metadata requirements + * @throws SQLException If something goes wrong + */ + List search(Context context, ProcessQueryParameterContainer processQueryParameterContainer, int limit, + int offset) throws SQLException; + + /** + * Count all the processes which match the requirements. The requirements are evaluated like the search + * method. 
+ * @param context The relevant DSpace context + * @param processQueryParameterContainer The {@link ProcessQueryParameterContainer} containing all the values + * that the returned {@link Process} objects must adhere to + * @return The number of results matching the query + * @throws SQLException If something goes wrong + */ + + int countTotalWithParameters(Context context, ProcessQueryParameterContainer processQueryParameterContainer) + throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java index b935812c8c..683a6502c5 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java @@ -108,6 +108,17 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO implements ItemDA return iterate(query); } + @Override + public Iterator findBySubmitter(Context context, EPerson eperson, boolean retrieveAllItems) + throws SQLException { + if (!retrieveAllItems) { + return findBySubmitter(context, eperson); + } + Query query = createQuery(context, "FROM Item WHERE submitter= :submitter"); + query.setParameter("submitter", eperson); + return iterate(query); + } + @Override public Iterator findBySubmitter(Context context, EPerson eperson, MetadataField metadataField, int limit) throws SQLException { diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java index 4c10387d93..5c8083a86b 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java @@ -8,15 +8,20 @@ package org.dspace.content.dao.impl; import java.sql.SQLException; +import java.util.LinkedList; import java.util.List; +import java.util.Map; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; +import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; +import org.apache.commons.lang3.StringUtils; import org.dspace.content.dao.ProcessDAO; import org.dspace.core.AbstractHibernateDAO; import org.dspace.core.Context; import org.dspace.scripts.Process; +import org.dspace.scripts.ProcessQueryParameterContainer; import org.dspace.scripts.Process_; /** @@ -56,6 +61,7 @@ public class ProcessDAOImpl extends AbstractHibernateDAO implements Pro CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class); Root processRoot = criteriaQuery.from(Process.class); criteriaQuery.select(processRoot); + criteriaQuery.orderBy(criteriaBuilder.desc(processRoot.get(Process_.processId))); return list(context, criteriaQuery, false, Process.class, limit, offset); } @@ -71,6 +77,76 @@ public class ProcessDAOImpl extends AbstractHibernateDAO implements Pro return count(context, criteriaQuery, criteriaBuilder, processRoot); } + + @Override + public List search(Context context, ProcessQueryParameterContainer processQueryParameterContainer, + int limit, int offset) throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class); + Root processRoot = criteriaQuery.from(Process.class); + criteriaQuery.select(processRoot); + + handleProcessQueryParameters(processQueryParameterContainer, criteriaBuilder, criteriaQuery, processRoot); + return list(context, 
criteriaQuery, false, Process.class, limit, offset); + + } + + /** + * This method will ensure that the params contained in the {@link ProcessQueryParameterContainer} are transferred + * to the ProcessRoot and that the correct conditions apply to the query + * @param processQueryParameterContainer The object containing the conditions that need to be met + * @param criteriaBuilder The criteriaBuilder to be used + * @param criteriaQuery The criteriaQuery to be used + * @param processRoot The processRoot to be used + */ + private void handleProcessQueryParameters(ProcessQueryParameterContainer processQueryParameterContainer, + CriteriaBuilder criteriaBuilder, CriteriaQuery criteriaQuery, + Root processRoot) { + addProcessQueryParameters(processQueryParameterContainer, criteriaBuilder, criteriaQuery, processRoot); + if (StringUtils.equalsIgnoreCase(processQueryParameterContainer.getSortOrder(), "asc")) { + criteriaQuery + .orderBy(criteriaBuilder.asc(processRoot.get(processQueryParameterContainer.getSortProperty()))); + } else if (StringUtils.equalsIgnoreCase(processQueryParameterContainer.getSortOrder(), "desc")) { + criteriaQuery + .orderBy(criteriaBuilder.desc(processRoot.get(processQueryParameterContainer.getSortProperty()))); + } + } + + /** + * This method will apply the variables in the {@link ProcessQueryParameterContainer} as criteria for the + * {@link Process} objects to the given CriteriaQuery. + * They'll need to adhere to these variables in order to be eligible for return + * @param processQueryParameterContainer The object containing the variables for the {@link Process} + * to adhere to + * @param criteriaBuilder The current CriteriaBuilder + * @param criteriaQuery The current CriteriaQuery + * @param processRoot The processRoot + */ + private void addProcessQueryParameters(ProcessQueryParameterContainer processQueryParameterContainer, + CriteriaBuilder criteriaBuilder, CriteriaQuery criteriaQuery, + Root processRoot) { + List andPredicates = new LinkedList<>(); + + for (Map.Entry entry : processQueryParameterContainer.getQueryParameterMap().entrySet()) { + andPredicates.add(criteriaBuilder.equal(processRoot.get(entry.getKey()), entry.getValue())); + } + criteriaQuery.where(criteriaBuilder.and(andPredicates.toArray(new Predicate[]{}))); + } + + @Override + public int countTotalWithParameters(Context context, ProcessQueryParameterContainer processQueryParameterContainer) + throws SQLException { + + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class); + Root processRoot = criteriaQuery.from(Process.class); + criteriaQuery.select(processRoot); + + addProcessQueryParameters(processQueryParameterContainer, criteriaBuilder, criteriaQuery, processRoot); + return count(context, criteriaQuery, criteriaBuilder, processRoot); + } + + } diff --git a/dspace-api/src/main/java/org/dspace/content/packager/METSManifest.java b/dspace-api/src/main/java/org/dspace/content/packager/METSManifest.java index 53a8678df2..ed15037c11 100644 --- a/dspace-api/src/main/java/org/dspace/content/packager/METSManifest.java +++ b/dspace-api/src/main/java/org/dspace/content/packager/METSManifest.java @@ -272,12 +272,16 @@ public class METSManifest { // Set validation feature if (validate) { builder.setFeature("http://apache.org/xml/features/validation/schema", true); - } - // Tell the parser where local copies of schemas are, to speed up - // validation. Local XSDs are identified in the configuration file. 
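// An in-memory illustration of the AND semantics documented for ProcessDAO.search(...) and implemented
// above with JPA Criteria predicates: every entry of the parameter map must match, results are sorted by
// the requested property and direction, then windowed by offset and limit. The string-keyed maps stand
// in for Process entities and for ProcessQueryParameterContainer; none of this replaces the real DAO.
import java.util.Arrays;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class ProcessSearchSketch {

    static List<Map<String, String>> search(List<Map<String, String>> rows, Map<String, String> queryParameters,
            String sortProperty, boolean ascending, int limit, int offset) {
        Comparator<Map<String, String>> bySort = Comparator.comparing(row -> row.get(sortProperty));
        return rows.stream()
            // AND-combine every condition, as addProcessQueryParameters does with its predicate list
            .filter(row -> queryParameters.entrySet().stream()
                .allMatch(e -> e.getValue().equals(row.get(e.getKey()))))
            .sorted(ascending ? bySort : bySort.reversed())
            .skip(offset)
            .limit(limit)
            .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<Map<String, String>> processes = Arrays.asList(
            Map.of("scriptName", "metadata-export", "status", "COMPLETED"),
            Map.of("scriptName", "metadata-import", "status", "COMPLETED"),
            Map.of("scriptName", "metadata-export", "status", "FAILED"));
        Map<String, String> params = new LinkedHashMap<>();
        params.put("scriptName", "metadata-export");
        params.put("status", "COMPLETED");
        System.out.println(search(processes, params, "scriptName", true, 10, 0)); // only the first row matches
    }
}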
- if (localSchemas.length() > 0) { - builder.setProperty("http://apache.org/xml/properties/schema/external-schemaLocation", localSchemas); + // Tell the parser where local copies of schemas are, to speed up + // validation & avoid XXE attacks from remote schemas. Local XSDs are identified in the configuration file. + if (localSchemas.length() > 0) { + builder.setProperty("http://apache.org/xml/properties/schema/external-schemaLocation", localSchemas); + } + } else { + // disallow DTD parsing to ensure no XXE attacks can occur. + // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html + builder.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); } // Parse the METS file diff --git a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java index aa8bc94d25..8637b61703 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java @@ -20,8 +20,10 @@ import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.Item; import org.dspace.core.Context; +import org.dspace.discovery.SearchServiceException; import org.dspace.eperson.Group; + /** * Service interface class for the Collection object. * The implementation of this class is responsible for all business logic calls for the Collection object and is @@ -339,4 +341,57 @@ public interface CollectionService * @throws SQLException if database error */ List> getCollectionsWithBitstreamSizesTotal(Context context) throws SQLException; + + /** + * This method will create a default read group for the given Collection. It'll create either a defaultItemRead or + * a defaultBitstreamRead group depending on the given parameters + * + * @param context The relevant DSpace context + * @param collection The collection for which it'll be created + * @param typeOfGroupString The type of group to be made, item or bitstream + * @param defaultRead The defaultRead int, item or bitstream + * @return The created Group + * @throws SQLException If something goes wrong + * @throws AuthorizeException If something goes wrong + */ + Group createDefaultReadGroup(Context context, Collection collection, String typeOfGroupString, int defaultRead) + throws SQLException, AuthorizeException; + + /** + * Returns Collections for which the current user has 'submit' privileges. + * NOTE: for better performance, this method retrieves its results from an + * index (cache) and does not query the database directly. + * This means that results may be stale or outdated until DS-4524 is resolved" + * + * @param q limit the returned collection to those with metadata values matching the query terms. 
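// The same DOCTYPE hardening applied to METSManifest above, demonstrated with plain JAXP so it can be
// run standalone: once doctype declarations are disallowed, any document that tries to declare external
// entities is rejected before it can be expanded. METSManifest sets the identical Xerces feature on its
// own XML builder; this snippet is only a runnable illustration of the behaviour.
import java.io.StringReader;
import javax.xml.parsers.DocumentBuilderFactory;
import org.xml.sax.InputSource;
import org.xml.sax.SAXParseException;

public class DisallowDoctypeSketch {

    public static void main(String[] args) throws Exception {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
        factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);

        // an ordinary document parses as before
        factory.newDocumentBuilder().parse(new InputSource(new StringReader("<mets/>")));
        System.out.println("plain XML accepted");

        // a document smuggling in a DOCTYPE (the XXE entry point) is rejected outright
        String hostile = "<!DOCTYPE mets [<!ENTITY xxe SYSTEM \"file:///etc/passwd\">]><mets>&xxe;</mets>";
        try {
            factory.newDocumentBuilder().parse(new InputSource(new StringReader(hostile)));
        } catch (SAXParseException expected) {
            System.out.println("DOCTYPE rejected: " + expected.getMessage());
        }
    }
}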
+ * The terms are used to make also a prefix query on SOLR so it can be used to implement + * an autosuggest feature over the collection name + * @param context DSpace Context + * @param community parent community + * @param offset the position of the first result to return + * @param limit paging limit + * @return discovery search result objects + * @throws SQLException if something goes wrong + * @throws SearchServiceException if search error + */ + public List findCollectionsWithSubmit(String q, Context context, Community community, + int offset, int limit) throws SQLException, SearchServiceException; + + /** + * Counts the number of Collection for which the current user has 'submit' privileges. + * NOTE: for better performance, this method retrieves its results from an index (cache) + * and does not query the database directly. + * This means that results may be stale or outdated until DS-4524 is resolved." + * + * @param q limit the returned collection to those with metadata values matching the query terms. + * The terms are used to make also a prefix query on SOLR so it can be used to implement + * an autosuggest feature over the collection name + * @param context DSpace Context + * @param community parent community + * @return total collections found + * @throws SQLException if something goes wrong + * @throws SearchServiceException if search error + */ + public int countCollectionsWithSubmit(String q, Context context, Community community) + throws SQLException, SearchServiceException; } diff --git a/dspace-api/src/main/java/org/dspace/content/service/DSpaceObjectService.java b/dspace-api/src/main/java/org/dspace/content/service/DSpaceObjectService.java index 203d2a1787..ff44713b38 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/DSpaceObjectService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/DSpaceObjectService.java @@ -200,10 +200,11 @@ public interface DSpaceObjectService { * and the ISO3166 country code. null means the * value has no language (for example, a date). * @param values the values to add. + * @return the list of MetadataValues added to the object * @throws SQLException if database error */ - public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang, - List values) throws SQLException; + public List addMetadata(Context context, T dso, String schema, String element, String qualifier, + String lang, List values) throws SQLException; /** * Add metadata fields. These are appended to existing values. @@ -223,10 +224,11 @@ public interface DSpaceObjectService { * @param values the values to add. * @param authorities the external authority key for this value (or null) * @param confidences the authority confidence (default 0) + * @return the list of MetadataValues added to the object * @throws SQLException if database error */ - public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang, - List values, List authorities, List confidences) + public List addMetadata(Context context, T dso, String schema, String element, String qualifier, + String lang, List values, List authorities, List confidences) throws SQLException; /** @@ -243,32 +245,64 @@ public interface DSpaceObjectService { * @param values the values to add. 
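// A rough, in-memory approximation of the autosuggest behaviour described for
// findCollectionsWithSubmit(q, ...): the query is treated as a prefix of words in the collection name.
// The real implementation runs a Solr prefix query against the discovery index and also filters on
// 'submit' privileges; the sample names below are invented and the sketch only illustrates the matching idea.
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.stream.Collectors;

public class CollectionAutosuggestSketch {

    static List<String> suggest(List<String> collectionNames, String q) {
        String prefix = q.toLowerCase(Locale.ROOT);
        return collectionNames.stream()
            .filter(name -> Arrays.stream(name.toLowerCase(Locale.ROOT).split("\\s+"))
                .anyMatch(word -> word.startsWith(prefix)))
            .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<String> names = Arrays.asList("Theses", "Theology Working Papers", "Annual Reports");
        System.out.println(suggest(names, "the")); // [Theses, Theology Working Papers]
    }
}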
     * @param authorities the external authority key for this value (or null)
     * @param confidences the authority confidence (default 0)
+     * @return the list of MetadataValues added to the object
     * @throws SQLException if database error
     */
-    public void addMetadata(Context context, T dso, MetadataField metadataField, String lang, List<String> values,
-                            List<String> authorities, List<Integer> confidences) throws SQLException;
+    public List<MetadataValue> addMetadata(Context context, T dso, MetadataField metadataField, String lang,
+            List<String> values, List<String> authorities, List<Integer> confidences) throws SQLException;
     /**
      * Shortcut for {@link #addMetadata(Context, DSpaceObject, MetadataField, String, List, List, List)} when a single
      * value needs to be added
-     *
-     * @param context
-     * @param dso
-     * @param metadataField
-     * @param language
-     * @param value
-     * @param authority
-     * @param confidence
+     *
+     * @param context DSpace context
+     * @param dso DSpaceObject
+     * @param metadataField the metadata field to which the value is to be set
+     * @param language the ISO639 language code, optionally followed by an underscore
+     *                 and the ISO3166 country code. null means the
+     *                 value has no language (for example, a date).
+     * @param value the value to add.
+     * @param authority the external authority key for this value (or null)
+     * @param confidence the authority confidence (default 0)
+     * @return the MetadataValue added to the object
+     * @throws SQLException
     */
-    public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value,
-                            String authority, int confidence) throws SQLException;
+    public MetadataValue addMetadata(Context context, T dso, MetadataField metadataField, String language,
+            String value, String authority, int confidence) throws SQLException;
-    public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value)
+    /**
+     * Add a single metadata field. The value is appended to any existing values.
+     * Use clearMetadata to remove values.
+     *
+     * @param context DSpace context
+     * @param dso DSpaceObject
+     * @param metadataField the metadata field to which the value is to be set
+     * @param language the ISO639 language code, optionally followed by an underscore
+     *                 and the ISO3166 country code. null means the
+     *                 value has no language (for example, a date).
+     * @param value the value to add.
+     * @return the MetadataValue added to the object
+     * @throws SQLException if database error
+     */
+    public MetadataValue addMetadata(Context context, T dso, MetadataField metadataField, String language, String value)
         throws SQLException;
-    public void addMetadata(Context context, T dso, MetadataField metadataField, String language, List<String> values)
-        throws SQLException;
+    /**
+     * Add metadata fields. The values are appended to any existing values.
+     * Use clearMetadata to remove values.
+     *
+     * @param context DSpace context
+     * @param dso DSpaceObject
+     * @param metadataField the metadata field to which the value is to be set
+     * @param language the ISO639 language code, optionally followed by an underscore
+     *                 and the ISO3166 country code. null means the
+     *                 value has no language (for example, a date).
+     * @param values the values to add.
+     * @return the list of MetadataValues added to the object
+     * @throws SQLException if database error
+     */
+    public List<MetadataValue> addMetadata(Context context, T dso, MetadataField metadataField, String language,
+            List<String> values) throws SQLException;
     /**
      * Add a single metadata field. This is appended to existing
@@ -285,10 +319,11 @@ public interface DSpaceObjectService {
      * and the ISO3166 country code. null means the
      * value has no language (for example, a date).
      * @param value the value to add.
+     * @return the MetadataValue added to the object
      * @throws SQLException if database error
      */
-    public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
-                            String value) throws SQLException;
+    public MetadataValue addMetadata(Context context, T dso, String schema, String element, String qualifier,
+            String lang, String value) throws SQLException;
     /**
      * Add a single metadata field. This is appended to existing
      *
@@ -307,10 +342,11 @@ public interface DSpaceObjectService {
      * @param value the value to add.
      * @param authority the external authority key for this value (or null)
      * @param confidence the authority confidence (default 0)
+     * @return the MetadataValue added to the object
      * @throws SQLException if database error
      */
-    public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
-                            String value, String authority, int confidence) throws SQLException;
+    public MetadataValue addMetadata(Context context, T dso, String schema, String element, String qualifier,
+            String lang, String value, String authority, int confidence) throws SQLException;
     /**
      * Clear metadata values. As with getDC above,
diff --git a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java
index 71b736f1bd..ff30ffe0e0 100644
--- a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java
+++ b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java
@@ -113,6 +113,21 @@ public interface ItemService
     public Iterator<Item> findBySubmitter(Context context, EPerson eperson) throws SQLException;
+    /**
+     * Find all the items by a given submitter. The order is
+     * indeterminate. All items are included.
+     *
+     * @param context DSpace context object
+     * @param eperson the submitter
+     * @param retrieveAllItems flag to determine if all items should be returned or only archived items.
+     *                         If true, all items (regardless of status) are returned.
+     *                         If false, only archived items will be returned.
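// Why it is useful for addMetadata(...) to hand back the values it just created: the caller can keep
// working with the exact MetadataValue objects (for instance to adjust authority or confidence) without
// re-reading the item. The tiny Item/MetadataValue classes below are local stand-ins so the sketch runs
// on its own; the real signatures are the DSpaceObjectService ones shown above.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class AddMetadataReturnSketch {

    static class MetadataValue {
        final String value;
        String authority;
        int confidence;

        MetadataValue(String value) {
            this.value = value;
        }
    }

    static class Item {
        final List<MetadataValue> metadata = new ArrayList<>();

        // mirrors the new contract: the created values are handed back to the caller
        List<MetadataValue> addMetadata(List<String> values) {
            List<MetadataValue> created = new ArrayList<>();
            for (String v : values) {
                MetadataValue mv = new MetadataValue(v);
                metadata.add(mv);
                created.add(mv);
            }
            return created;
        }
    }

    public static void main(String[] args) {
        Item item = new Item();
        List<MetadataValue> added = item.addMetadata(Arrays.asList("Smith, John", "Doe, Jane"));
        // immediately enrich the values we just created, no second lookup needed
        added.get(0).authority = "authority-key-1";
        added.get(0).confidence = 600;
        System.out.println(item.metadata.size() + " values, first authority = " + added.get(0).authority);
    }
}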
+ * @return an iterator over the items submitted by eperson + * @throws SQLException if database error + */ + public Iterator findBySubmitter(Context context, EPerson eperson, boolean retrieveAllItems) + throws SQLException; + /** * Retrieve the list of items submitted by eperson, ordered by recently submitted, optionally limitable * diff --git a/dspace-api/src/main/java/org/dspace/content/service/MetadataDSpaceCsvExportService.java b/dspace-api/src/main/java/org/dspace/content/service/MetadataDSpaceCsvExportService.java new file mode 100644 index 0000000000..aeb956fc49 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/service/MetadataDSpaceCsvExportService.java @@ -0,0 +1,58 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.service; + +import java.util.Iterator; + +import org.dspace.app.bulkedit.DSpaceCSV; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.scripts.handler.DSpaceRunnableHandler; + +/** + * This is the interface to be implemented by a Service that deals with the exporting of Metadata + */ +public interface MetadataDSpaceCsvExportService { + + /** + * This method will export DSpaceObject objects depending on the parameters it gets. It can export all the items + * in the repository, all the items in a community, all the items in a collection or a specific item. The latter + * three are specified by the handle parameter. The entire repository can be exported by defining the + * exportAllItems parameter as true + * @param context The relevant DSpace context + * @param exportAllItems A boolean indicating whether or not the entire repository should be exported + * @param exportAllMetadata Defines if all metadata should be exported or only the allowed ones + * @param handle The handle for the DSpaceObject to be exported, can be a Community, Collection or Item + * @return A DSpaceCSV object containing the exported information + * @throws Exception If something goes wrong + */ + public DSpaceCSV handleExport(Context context, boolean exportAllItems, boolean exportAllMetadata, + String handle, DSpaceRunnableHandler dSpaceRunnableHandler) throws Exception; + + /** + * This method will export all the Items in the given toExport iterator to a DSpaceCSV + * @param context The relevant DSpace context + * @param toExport The iterator containing the items to export + * @param exportAll Defines if all metadata should be exported or only the allowed ones + * @return A DSpaceCSV object containing the exported information + * @throws Exception If something goes wrong + */ + public DSpaceCSV export(Context context, Iterator toExport, boolean exportAll) throws Exception; + + /** + * This method will export all the Items within the given Community to a DSpaceCSV + * @param context The relevant DSpace context + * @param community The Community that contains the Items to be exported + * @param exportAll Defines if all metadata should be exported or only the allowed ones + * @return A DSpaceCSV object containing the exported information + * @throws Exception If something goes wrong + */ + public DSpaceCSV export(Context context, Community community, boolean exportAll) throws Exception; + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/core/Context.java 
b/dspace-api/src/main/java/org/dspace/core/Context.java index ecfc29d29d..e878367ec4 100644 --- a/dspace-api/src/main/java/org/dspace/core/Context.java +++ b/dspace-api/src/main/java/org/dspace/core/Context.java @@ -179,7 +179,7 @@ public class Context implements AutoCloseable { } currentUser = null; - currentLocale = I18nUtil.DEFAULTLOCALE; + currentLocale = I18nUtil.getDefaultLocale(); extraLogInfo = ""; ignoreAuth = false; @@ -190,7 +190,15 @@ public class Context implements AutoCloseable { setMode(this.mode); } - public static boolean updateDatabase() { + /** + * Update the DSpace database, ensuring that any necessary migrations are run prior to initializing + * Hibernate. + *

+ * This is synchronized as it only needs to be run successfully *once* (for the first Context initialized). + * + * @return true/false, based on whether database was successfully updated + */ + public static synchronized boolean updateDatabase() { //If the database has not been updated yet, update it and remember that. if (databaseUpdated.compareAndSet(false, true)) { @@ -200,7 +208,7 @@ public class Context implements AutoCloseable { try { DatabaseUtils.updateDatabase(); } catch (SQLException sqle) { - log.fatal("Cannot initialize database via Flyway!", sqle); + log.fatal("Cannot update or initialize database via Flyway!", sqle); databaseUpdated.set(false); } } @@ -641,9 +649,9 @@ public class Context implements AutoCloseable { /** * Temporary change the user bound to the context, empty the special groups that * are retained to allow subsequent restore - * + * * @param newUser the EPerson to bound to the context - * + * * @throws IllegalStateException if the switch was already performed without be * restored */ @@ -661,7 +669,7 @@ public class Context implements AutoCloseable { /** * Restore the user bound to the context and his special groups - * + * * @throws IllegalStateException if no switch was performed before */ public void restoreContextUser() { @@ -876,4 +884,5 @@ public class Context implements AutoCloseable { private void reloadContextBoundEntities() throws SQLException { currentUser = reloadEntity(currentUser); } + } diff --git a/dspace-api/src/main/java/org/dspace/core/I18nUtil.java b/dspace-api/src/main/java/org/dspace/core/I18nUtil.java index 37e48c4a4f..cd0609e29f 100644 --- a/dspace-api/src/main/java/org/dspace/core/I18nUtil.java +++ b/dspace-api/src/main/java/org/dspace/core/I18nUtil.java @@ -37,9 +37,6 @@ import org.dspace.services.factory.DSpaceServicesFactory; public class I18nUtil { private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(I18nUtil.class); - // the default Locale of this DSpace Instance - public static final Locale DEFAULTLOCALE = getDefaultLocale(); - // delimiters between elements of UNIX/POSIX locale spec, e.g. 
en_US.UTF-8 private static final String LOCALE_DELIMITERS = " _."; @@ -127,7 +124,7 @@ public class I18nUtil { return parseLocales(locales); } else { Locale[] availableLocales = new Locale[1]; - availableLocales[0] = DEFAULTLOCALE; + availableLocales[0] = getDefaultLocale(); return availableLocales; } } @@ -148,7 +145,7 @@ public class I18nUtil { Locale supportedLocale = null; String testLocale = ""; if (availableLocales == null) { - supportedLocale = DEFAULTLOCALE; + supportedLocale = getDefaultLocale(); } else { if (!locale.getVariant().equals("")) { testLocale = locale.toString(); @@ -188,12 +185,29 @@ public class I18nUtil { } } if (!isSupported) { - supportedLocale = DEFAULTLOCALE; + supportedLocale = getDefaultLocale(); } } return supportedLocale; } + /** + * Gets the appropriate supported Locale according for a given Locale If + * no appropriate supported locale is found, the DEFAULTLOCALE is used + * + * @param locale String to find the corresponding Locale + * @return supportedLocale + * Locale for session according to locales supported by this DSpace instance as set in dspace.cfg + */ + public static Locale getSupportedLocale(String locale) { + Locale currentLocale = null; + if (locale != null) { + currentLocale = I18nUtil.getSupportedLocale(new Locale(locale)); + } else { + currentLocale = I18nUtil.getDefaultLocale(); + } + return currentLocale; + } /** * Get the appropriate localized version of submission-forms.xml according to language settings @@ -220,7 +234,7 @@ public class I18nUtil { * String of the message */ public static String getMessage(String key) { - return getMessage(key.trim(), DEFAULTLOCALE); + return getMessage(key.trim(), getDefaultLocale()); } /** @@ -233,7 +247,7 @@ public class I18nUtil { */ public static String getMessage(String key, Locale locale) { if (locale == null) { - locale = DEFAULTLOCALE; + locale = getDefaultLocale(); } ResourceBundle.Control control = ResourceBundle.Control.getNoFallbackControl( @@ -384,4 +398,23 @@ public class I18nUtil { } return resultList.toArray(new Locale[resultList.size()]); } + + /** + * Check if the input locale is in the list of supported locales + * @param locale + * @return true if locale is supported, false otherwise + */ + public static boolean isSupportedLocale(Locale locale) { + boolean isSupported = false; + Locale[] supportedLocales = getSupportedLocales(); + if (supportedLocales != null) { + for (Locale sLocale: supportedLocales) { + if (locale.getLanguage().equals(sLocale.getLanguage()) ) { + isSupported = true; + break; + } + } + } + return isSupported; + } } diff --git a/dspace-api/src/main/java/org/dspace/core/LegacyPluginServiceImpl.java b/dspace-api/src/main/java/org/dspace/core/LegacyPluginServiceImpl.java index f8291dc977..ea8cdc1403 100644 --- a/dspace-api/src/main/java/org/dspace/core/LegacyPluginServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/core/LegacyPluginServiceImpl.java @@ -345,8 +345,8 @@ public class LegacyPluginServiceImpl implements PluginService { " for interface=" + iname + " pluginName=" + name); Object result = pluginClass.newInstance(); - if (result instanceof SelfNamedPlugin) { - ((SelfNamedPlugin) result).setPluginInstanceName(name); + if (result instanceof NameAwarePlugin) { + ((NameAwarePlugin) result).setPluginInstanceName(name); } return result; } diff --git a/dspace-api/src/main/java/org/dspace/core/NameAwarePlugin.java b/dspace-api/src/main/java/org/dspace/core/NameAwarePlugin.java new file mode 100644 index 0000000000..6c562ea04c --- /dev/null +++ 
b/dspace-api/src/main/java/org/dspace/core/NameAwarePlugin.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.core; + +/** + * This is the interface that should be implemented by all named plugins that + * want to be aware of their own name. + * + * @author Andrea Bollini (andrea.bollini at 4science.it) + * @version $Revision$ + * @see org.dspace.core.service.PluginService + */ +public interface NameAwarePlugin { + + /** + * Get the instance's particular name. + * Returns the name by which the class was chosen when + * this instance was created. Only works for instances created + * by PluginService, or if someone remembers to call setPluginName. + *

+ * Useful when the implementation class wants to be configured differently + * when it is invoked under different names. + * + * @return name or null if not available. + */ + public String getPluginInstanceName(); + + /** + * Set the name under which this plugin was instantiated. + * Not to be invoked by application code, it is + * called automatically by PluginService.getNamedPlugin() + * when the plugin is instantiated. + * + * @param name -- name used to select this class. + */ + public void setPluginInstanceName(String name); +} diff --git a/dspace-api/src/main/java/org/dspace/core/SelfNamedPlugin.java b/dspace-api/src/main/java/org/dspace/core/SelfNamedPlugin.java index 2bdcf830e7..680fa15c80 100644 --- a/dspace-api/src/main/java/org/dspace/core/SelfNamedPlugin.java +++ b/dspace-api/src/main/java/org/dspace/core/SelfNamedPlugin.java @@ -28,7 +28,7 @@ package org.dspace.core; * @version $Revision$ * @see org.dspace.core.service.PluginService */ -public abstract class SelfNamedPlugin { +public abstract class SelfNamedPlugin implements NameAwarePlugin { // the specific alias used to find the class that created this instance. private String myName = null; @@ -52,30 +52,13 @@ public abstract class SelfNamedPlugin { return null; } - /** - * Get an instance's particular name. - * Returns the name by which the class was chosen when - * this instance was created. Only works for instances created - * by PluginService, or if someone remembers to call setPluginName. - *

- * Useful when the implementation class wants to be configured differently - * when it is invoked under different names. - * - * @return name or null if not available. - */ + @Override public String getPluginInstanceName() { return myName; } - /** - * Set the name under which this plugin was instantiated. - * Not to be invoked by application code, it is - * called automatically by PluginService.getNamedPlugin() - * when the plugin is instantiated. - * - * @param name -- name used to select this class. - */ - protected void setPluginInstanceName(String name) { + @Override + public void setPluginInstanceName(String name) { myName = name; } } diff --git a/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java b/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java index 2b6c52d0d6..754f3b4ab3 100644 --- a/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java +++ b/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java @@ -199,6 +199,9 @@ public class MetadataWebService extends AbstractCurationTask implements Namespac DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setNamespaceAware(true); try { + // disallow DTD parsing to ensure no XXE attacks can occur. + // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html + factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); docBuilder = factory.newDocumentBuilder(); } catch (ParserConfigurationException pcE) { log.error("caught exception: " + pcE); diff --git a/dspace-api/src/main/java/org/dspace/curate/Curation.java b/dspace-api/src/main/java/org/dspace/curate/Curation.java new file mode 100644 index 0000000000..44cbb24ed9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/curate/Curation.java @@ -0,0 +1,371 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.curate; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.io.PrintStream; +import java.io.Writer; +import java.sql.SQLException; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.UUID; + +import org.apache.commons.cli.ParseException; +import org.apache.commons.io.output.NullOutputStream; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.DSpaceObject; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.core.Context; +import org.dspace.core.factory.CoreServiceFactory; +import org.dspace.curate.factory.CurateServiceFactory; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.handle.service.HandleService; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.utils.DSpace; + +/** + * CurationCli provides command-line access to Curation tools and processes. 
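To make the intent of the MetadataWebService change above explicit, here is a small self-contained sketch (not part of this patch; the helper method name is hypothetical) of the same DocumentBuilderFactory hardening, using the exact feature URI from the patch:

    import javax.xml.parsers.DocumentBuilder;
    import javax.xml.parsers.DocumentBuilderFactory;
    import javax.xml.parsers.ParserConfigurationException;

    static DocumentBuilder newHardenedBuilder() throws ParserConfigurationException {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setNamespaceAware(true);
        // Rejecting any DOCTYPE declaration means external entities can never be declared, so XXE is not possible.
        factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        return factory.newDocumentBuilder();
    }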
+ * + * @author richardrodgers + */ +public class Curation extends DSpaceRunnable { + + protected EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); + + protected Context context; + private CurationClientOptions curationClientOptions; + + private String task; + private String taskFile; + private String id; + private String queue; + private String scope; + private String reporter; + private Map parameters; + private boolean verbose; + + @Override + public void internalRun() throws Exception { + if (curationClientOptions == CurationClientOptions.HELP) { + printHelp(); + return; + } + + Curator curator = initCurator(); + + // load curation tasks + if (curationClientOptions == CurationClientOptions.TASK) { + long start = System.currentTimeMillis(); + handleCurationTask(curator); + this.endScript(start); + } + + // process task queue + if (curationClientOptions == CurationClientOptions.QUEUE) { + // process the task queue + TaskQueue taskQueue = (TaskQueue) CoreServiceFactory.getInstance().getPluginService() + .getSinglePlugin(TaskQueue.class); + if (taskQueue == null) { + super.handler.logError("No implementation configured for queue"); + throw new UnsupportedOperationException("No queue service available"); + } + long timeRun = this.runQueue(taskQueue, curator); + this.endScript(timeRun); + } + } + + /** + * Does the curation task (-t) or the task in the given file (-T). + * Checks: + * - if required option -i is missing. + * - if option -t has a valid task option + */ + private void handleCurationTask(Curator curator) throws IOException, SQLException { + String taskName; + if (commandLine.hasOption('t')) { + if (verbose) { + handler.logInfo("Adding task: " + this.task); + } + curator.addTask(this.task); + if (verbose && !curator.hasTask(this.task)) { + handler.logInfo("Task: " + this.task + " not resolved"); + } + } else if (commandLine.hasOption('T')) { + // load taskFile + BufferedReader reader = null; + try { + reader = new BufferedReader(new FileReader(this.taskFile)); + while ((taskName = reader.readLine()) != null) { + if (verbose) { + super.handler.logInfo("Adding task: " + taskName); + } + curator.addTask(taskName); + } + } finally { + if (reader != null) { + reader.close(); + } + } + } + // run tasks against object + if (verbose) { + super.handler.logInfo("Starting curation"); + super.handler.logInfo("Curating id: " + this.id); + } + if ("all".equals(this.id)) { + // run on whole Site + curator.curate(context, + ContentServiceFactory.getInstance().getSiteService().findSite(context).getHandle()); + } else { + curator.curate(context, this.id); + } + } + + /** + * Runs task queue (-q set) + * + * @param queue The task queue + * @param curator The curator + * @return Time when queue started + */ + private long runQueue(TaskQueue queue, Curator curator) throws SQLException, AuthorizeException, IOException { + // use current time as our reader 'ticket' + long ticket = System.currentTimeMillis(); + Iterator entryIter = queue.dequeue(this.queue, ticket).iterator(); + while (entryIter.hasNext()) { + TaskQueueEntry entry = entryIter.next(); + if (verbose) { + super.handler.logInfo("Curating id: " + entry.getObjectId()); + } + curator.clear(); + // does entry relate to a DSO or workflow object? 
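            // Illustrative comment, not part of the patch: entries queued for archived objects carry a
            // handle such as "123456789/42", so the '/' test below routes them to curator.curate(context, id);
            // entries without a '/' appear to hold a workflow item identifier and are handed to the
            // WorkflowCuratorService instead.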
+ if (entry.getObjectId().indexOf('/') > 0) { + for (String taskName : entry.getTaskNames()) { + curator.addTask(taskName); + } + curator.curate(context, entry.getObjectId()); + } else { + // make eperson who queued task the effective user + EPerson agent = ePersonService.findByEmail(context, entry.getEpersonId()); + if (agent != null) { + context.setCurrentUser(agent); + } + CurateServiceFactory.getInstance().getWorkflowCuratorService() + .curate(curator, context, entry.getObjectId()); + } + } + queue.release(this.queue, ticket, true); + return ticket; + } + + /** + * End of curation script; logs script time if -v verbose is set + * + * @param timeRun Time script was started + * @throws SQLException If DSpace contextx can't complete + */ + private void endScript(long timeRun) throws SQLException { + context.complete(); + if (verbose) { + long elapsed = System.currentTimeMillis() - timeRun; + this.handler.logInfo("Ending curation. Elapsed time: " + elapsed); + } + } + + /** + * Initialize the curator with command line variables + * + * @return Initialised curator + * @throws FileNotFoundException If file of command line variable -r reporter is not found + */ + private Curator initCurator() throws FileNotFoundException { + Curator curator = new Curator(); + OutputStream reporterStream; + if (null == this.reporter) { + reporterStream = new NullOutputStream(); + } else if ("-".equals(this.reporter)) { + reporterStream = System.out; + } else { + reporterStream = new PrintStream(this.reporter); + } + Writer reportWriter = new OutputStreamWriter(reporterStream); + curator.setReporter(reportWriter); + + if (this.scope != null) { + Curator.TxScope txScope = Curator.TxScope.valueOf(this.scope.toUpperCase()); + curator.setTransactionScope(txScope); + } + + curator.addParameters(parameters); + // we are operating in batch mode, if anyone cares. + curator.setInvoked(Curator.Invoked.BATCH); + return curator; + } + + @Override + public void printHelp() { + super.printHelp(); + super.handler.logInfo("\nwhole repo: CurationCli -t estimate -i all"); + super.handler.logInfo("single item: CurationCli -t generate -i itemId"); + super.handler.logInfo("task queue: CurationCli -q monthly"); + } + + @Override + public CurationScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager().getServiceByName("curate", CurationScriptConfiguration.class); + } + + @Override + public void setup() throws ParseException { + assignCurrentUserInContext(); + this.curationClientOptions = CurationClientOptions.getClientOption(commandLine); + + if (this.curationClientOptions != null) { + this.initGeneralLineOptionsAndCheckIfValid(); + if (curationClientOptions == CurationClientOptions.TASK) { + this.initTaskLineOptionsAndCheckIfValid(); + } else if (curationClientOptions == CurationClientOptions.QUEUE) { + this.queue = this.commandLine.getOptionValue('q'); + } + } else { + throw new IllegalArgumentException("[--help || --task|--taskfile <> -identifier <> || -queue <> ] must be" + + " specified"); + } + } + + /** + * This method will assign the currentUser to the {@link Context} variable which is also created in this method. 
+ * The instance of the method in this class will fetch the EPersonIdentifier from this class, this identifier + * was given to this class upon instantiation, it'll then be used to find the {@link EPerson} associated with it + * and this {@link EPerson} will be set as the currentUser of the created {@link Context} + * @throws ParseException If something went wrong with the retrieval of the EPerson Identifier + */ + protected void assignCurrentUserInContext() throws ParseException { + UUID currentUserUuid = this.getEpersonIdentifier(); + try { + this.context = new Context(Context.Mode.BATCH_EDIT); + EPerson eperson = ePersonService.find(context, currentUserUuid); + if (eperson == null) { + super.handler.logError("EPerson not found: " + currentUserUuid); + throw new IllegalArgumentException("Unable to find a user with uuid: " + currentUserUuid); + } + this.context.setCurrentUser(eperson); + } catch (SQLException e) { + handler.handleException("Something went wrong trying to fetch eperson for uuid: " + currentUserUuid, e); + } + } + + /** + * Fills in some optional command line options. + * Checks if there are missing required options or invalid values for options. + */ + private void initGeneralLineOptionsAndCheckIfValid() { + // report file + if (this.commandLine.hasOption('r')) { + this.reporter = this.commandLine.getOptionValue('r'); + } + + // parameters + this.parameters = new HashMap<>(); + if (this.commandLine.hasOption('p')) { + for (String parameter : this.commandLine.getOptionValues('p')) { + String[] parts = parameter.split("=", 2); + String name = parts[0].trim(); + String value; + if (parts.length > 1) { + value = parts[1].trim(); + } else { + value = "true"; + } + this.parameters.put(name, value); + } + } + + // verbose + verbose = false; + if (commandLine.hasOption('v')) { + verbose = true; + } + + // scope + if (this.commandLine.getOptionValue('s') != null) { + this.scope = this.commandLine.getOptionValue('s'); + if (this.scope != null && Curator.TxScope.valueOf(this.scope.toUpperCase()) == null) { + this.handler.logError("Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " + + "'open' recognized"); + throw new IllegalArgumentException( + "Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " + + "'open' recognized"); + } + } + } + + /** + * Fills in required command line options for the task or taskFile option. + * Checks if there are is a missing required -i option and if -i is either 'all' or a valid dso handle. + * Checks if -t task has a valid task option. + * Checks if -T taskfile is a valid file. 
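For illustration only (the option values are hypothetical, not part of the patch), the -p handling above turns each repeatable NAME=VALUE argument into a task parameter and defaults the value to "true" when no '=' is present:

    Map<String, String> parameters = new HashMap<>();                      // java.util.Map / java.util.HashMap
    for (String parameter : new String[] {"report=summary", "dryrun"}) {   // as if invoked with -p report=summary -p dryrun
        String[] parts = parameter.split("=", 2);
        String value = parts.length > 1 ? parts[1].trim() : "true";
        parameters.put(parts[0].trim(), value);
    }
    // parameters now contains {report=summary, dryrun=true}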
+ */ + private void initTaskLineOptionsAndCheckIfValid() { + // task or taskFile + if (this.commandLine.hasOption('t')) { + this.task = this.commandLine.getOptionValue('t'); + if (!CurationClientOptions.getTaskOptions().contains(this.task)) { + super.handler + .logError("-t task must be one of: " + CurationClientOptions.getTaskOptions()); + throw new IllegalArgumentException( + "-t task must be one of: " + CurationClientOptions.getTaskOptions()); + } + } else if (this.commandLine.hasOption('T')) { + this.taskFile = this.commandLine.getOptionValue('T'); + if (!(new File(this.taskFile).isFile())) { + super.handler + .logError("-T taskFile must be valid file: " + this.taskFile); + throw new IllegalArgumentException("-T taskFile must be valid file: " + this.taskFile); + } + } + + if (this.commandLine.hasOption('i')) { + this.id = this.commandLine.getOptionValue('i').toLowerCase(); + if (!this.id.equalsIgnoreCase("all")) { + HandleService handleService = HandleServiceFactory.getInstance().getHandleService(); + DSpaceObject dso; + try { + dso = handleService.resolveToObject(this.context, id); + } catch (SQLException e) { + super.handler.logError("SQLException trying to resolve handle " + id + " to a valid dso"); + throw new IllegalArgumentException( + "SQLException trying to resolve handle " + id + " to a valid dso"); + } + if (dso == null) { + super.handler.logError("Id must be specified: a valid dso handle or 'all'; " + this.id + " could " + + "not be resolved to valid dso handle"); + throw new IllegalArgumentException( + "Id must be specified: a valid dso handle or 'all'; " + this.id + " could " + + "not be resolved to valid dso handle"); + } + } + } else { + super.handler.logError("Id must be specified: a handle, 'all', or no -i and a -q task queue (-h for " + + "help)"); + throw new IllegalArgumentException( + "Id must be specified: a handle, 'all', or no -i and a -q task queue (-h for " + + "help)"); + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/curate/CurationCli.java b/dspace-api/src/main/java/org/dspace/curate/CurationCli.java index 3832ddf3ec..f70aea5b1d 100644 --- a/dspace-api/src/main/java/org/dspace/curate/CurationCli.java +++ b/dspace-api/src/main/java/org/dspace/curate/CurationCli.java @@ -7,269 +7,42 @@ */ package org.dspace.curate; -import java.io.BufferedReader; -import java.io.FileReader; -import java.io.OutputStream; -import java.io.OutputStreamWriter; -import java.io.PrintStream; -import java.io.Writer; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; +import java.sql.SQLException; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Options; -import org.apache.commons.cli.PosixParser; -import org.apache.commons.io.output.NullOutputStream; -import org.dspace.content.factory.ContentServiceFactory; +import org.apache.commons.cli.ParseException; import org.dspace.core.Context; -import org.dspace.core.factory.CoreServiceFactory; -import org.dspace.curate.factory.CurateServiceFactory; import org.dspace.eperson.EPerson; -import org.dspace.eperson.factory.EPersonServiceFactory; -import org.dspace.eperson.service.EPersonService; /** - * CurationCli provides command-line access to Curation tools and processes. - * - * @author richardrodgers + * This is the CLI version of the {@link Curation} script. + * This will only be called when the curate script is called from a commandline instance. 
*/ -public class CurationCli { +public class CurationCli extends Curation { /** - * Default constructor + * This is the overridden instance of the {@link Curation#assignCurrentUserInContext()} method in the parent class + * {@link Curation}. + * This is done so that the CLI version of the Script is able to retrieve its currentUser from the -e flag given + * with the parameters of the Script. + * @throws ParseException If the e flag was not given to the parameters when calling the script */ - private CurationCli() { } - - public static void main(String[] args) throws Exception { - // create an options object and populate it - CommandLineParser parser = new PosixParser(); - - Options options = new Options(); - - options.addOption("t", "task", true, - "curation task name"); - options.addOption("T", "taskfile", true, - "file containing curation task names"); - options.addOption("i", "id", true, - "Id (handle) of object to perform task on, or 'all' to perform on whole repository"); - options.addOption("p", "parameter", true, - "a task parameter 'NAME=VALUE'"); - options.addOption("q", "queue", true, - "name of task queue to process"); - options.addOption("e", "eperson", true, - "email address of curating eperson"); - options.addOption("r", "reporter", true, - "relative or absolute path to the desired report file. " - + "Use '-' to report to console. " - + "If absent, no reporting"); - options.addOption("s", "scope", true, - "transaction scope to impose: use 'object', 'curation', or 'open'. If absent, 'open' " + - "applies"); - options.addOption("v", "verbose", false, - "report activity to stdout"); - options.addOption("h", "help", false, "help"); - - CommandLine line = parser.parse(options, args); - - String taskName = null; - String taskFileName = null; - String idName = null; - String taskQueueName = null; - String ePersonName = null; - String reporterName = null; - String scope = null; - boolean verbose = false; - final Map parameters = new HashMap<>(); - - if (line.hasOption('h')) { - HelpFormatter help = new HelpFormatter(); - help.printHelp("CurationCli\n", options); - System.out - .println("\nwhole repo: CurationCli -t estimate -i all"); - System.out - .println("single item: CurationCli -t generate -i itemId"); - System.out - .println("task queue: CurationCli -q monthly"); - System.exit(0); - } - - if (line.hasOption('t')) { // task - taskName = line.getOptionValue('t'); - } - - if (line.hasOption('T')) { // task file - taskFileName = line.getOptionValue('T'); - } - - if (line.hasOption('i')) { // id - idName = line.getOptionValue('i'); - } - - if (line.hasOption('q')) { // task queue - taskQueueName = line.getOptionValue('q'); - } - - if (line.hasOption('e')) { // eperson - ePersonName = line.getOptionValue('e'); - } - - if (line.hasOption('p')) { // parameter - for (String parameter : line.getOptionValues('p')) { - String[] parts = parameter.split("=", 2); - String name = parts[0].trim(); - String value; - if (parts.length > 1) { - value = parts[1].trim(); - } else { - value = "true"; - } - parameters.put(name, value); - } - } - if (line.hasOption('r')) { // report file - reporterName = line.getOptionValue('r'); - } - - - if (line.hasOption('s')) { // transaction scope - scope = line.getOptionValue('s'); - } - - if (line.hasOption('v')) { // verbose - verbose = true; - } - - // now validate the args - if (idName == null && taskQueueName == null) { - System.out.println("Id must be specified: a handle, 'all', or a task queue (-h for help)"); - System.exit(1); - } - - if (taskName == null 
&& taskFileName == null && taskQueueName == null) { - System.out.println("A curation task or queue must be specified (-h for help)"); - System.exit(1); - } - - if (scope != null && Curator.TxScope.valueOf(scope.toUpperCase()) == null) { - System.out.println("Bad transaction scope '" + scope + "': only 'object', 'curation' or 'open' recognized"); - System.exit(1); - } - EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); - - Context c = new Context(Context.Mode.BATCH_EDIT); - if (ePersonName != null) { - EPerson ePerson = ePersonService.findByEmail(c, ePersonName); - if (ePerson == null) { - System.out.println("EPerson not found: " + ePersonName); - System.exit(1); - } - c.setCurrentUser(ePerson); - } else { - c.turnOffAuthorisationSystem(); - } - - Curator curator = new Curator(); - OutputStream reporter; - if (null == reporterName) { - reporter = new NullOutputStream(); - } else if ("-".equals(reporterName)) { - reporter = System.out; - } else { - reporter = new PrintStream(reporterName); - } - Writer reportWriter = new OutputStreamWriter(reporter); - curator.setReporter(reportWriter); - - if (scope != null) { - Curator.TxScope txScope = Curator.TxScope.valueOf(scope.toUpperCase()); - curator.setTransactionScope(txScope); - } - curator.addParameters(parameters); - // we are operating in batch mode, if anyone cares. - curator.setInvoked(Curator.Invoked.BATCH); - // load curation tasks - if (taskName != null) { - if (verbose) { - System.out.println("Adding task: " + taskName); - } - curator.addTask(taskName); - if (verbose && !curator.hasTask(taskName)) { - System.out.println("Task: " + taskName + " not resolved"); - } - } else if (taskQueueName == null) { - // load taskFile - BufferedReader reader = null; + @Override + protected void assignCurrentUserInContext() throws ParseException { + if (this.commandLine.hasOption('e')) { + String ePersonEmail = this.commandLine.getOptionValue('e'); + this.context = new Context(Context.Mode.BATCH_EDIT); try { - reader = new BufferedReader(new FileReader(taskFileName)); - while ((taskName = reader.readLine()) != null) { - if (verbose) { - System.out.println("Adding task: " + taskName); - } - curator.addTask(taskName); + EPerson ePerson = ePersonService.findByEmail(this.context, ePersonEmail); + if (ePerson == null) { + super.handler.logError("EPerson not found: " + ePersonEmail); + throw new IllegalArgumentException("Unable to find a user with email: " + ePersonEmail); } - } finally { - if (reader != null) { - reader.close(); - } - } - } - // run tasks against object - long start = System.currentTimeMillis(); - if (verbose) { - System.out.println("Starting curation"); - } - if (idName != null) { - if (verbose) { - System.out.println("Curating id: " + idName); - } - if ("all".equals(idName)) { - // run on whole Site - curator.curate(c, ContentServiceFactory.getInstance().getSiteService().findSite(c).getHandle()); - } else { - curator.curate(c, idName); + this.context.setCurrentUser(ePerson); + } catch (SQLException e) { + throw new IllegalArgumentException("SQLException trying to find user with email: " + ePersonEmail); } } else { - // process the task queue - TaskQueue queue = (TaskQueue) CoreServiceFactory.getInstance().getPluginService() - .getSinglePlugin(TaskQueue.class); - if (queue == null) { - System.out.println("No implementation configured for queue"); - throw new UnsupportedOperationException("No queue service available"); - } - // use current time as our reader 'ticket' - long ticket = 
System.currentTimeMillis(); - Iterator entryIter = queue.dequeue(taskQueueName, ticket).iterator(); - while (entryIter.hasNext()) { - TaskQueueEntry entry = entryIter.next(); - if (verbose) { - System.out.println("Curating id: " + entry.getObjectId()); - } - curator.clear(); - // does entry relate to a DSO or workflow object? - if (entry.getObjectId().indexOf("/") > 0) { - for (String task : entry.getTaskNames()) { - curator.addTask(task); - } - curator.curate(c, entry.getObjectId()); - } else { - // make eperson who queued task the effective user - EPerson agent = ePersonService.findByEmail(c, entry.getEpersonId()); - if (agent != null) { - c.setCurrentUser(agent); - } - CurateServiceFactory.getInstance().getWorkflowCuratorService() - .curate(curator, c, entry.getObjectId()); - } - } - queue.release(taskQueueName, ticket, true); - } - c.complete(); - if (verbose) { - long elapsed = System.currentTimeMillis() - start; - System.out.println("Ending curation. Elapsed time: " + elapsed); + throw new ParseException("Required parameter -e missing!"); } } } diff --git a/dspace-api/src/main/java/org/dspace/curate/CurationCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/curate/CurationCliScriptConfiguration.java new file mode 100644 index 0000000000..5e1d014873 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/curate/CurationCliScriptConfiguration.java @@ -0,0 +1,26 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.curate; + +import org.apache.commons.cli.Options; + +/** + * This is the CLI version of the {@link CurationScriptConfiguration} class that handles the configuration for the + * {@link CurationCli} script + */ +public class CurationCliScriptConfiguration extends CurationScriptConfiguration { + + @Override + public Options getOptions() { + options = super.getOptions(); + options.addOption("e", "eperson", true, "email address of curating eperson"); + options.getOption("e").setType(String.class); + options.getOption("e").setRequired(true); + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/curate/CurationClientOptions.java b/dspace-api/src/main/java/org/dspace/curate/CurationClientOptions.java new file mode 100644 index 0000000000..8ec0f14697 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/curate/CurationClientOptions.java @@ -0,0 +1,89 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.curate; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.Options; +import org.apache.commons.lang3.StringUtils; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; + +/** + * This Enum holds all the possible options and combinations for the Curation script + * + * @author Maria Verdonck (Atmire) on 23/06/2020 + */ +public enum CurationClientOptions { + TASK, + QUEUE, + HELP; + + private static List taskOptions; + + /** + * This method resolves the CommandLine parameters to figure out which action the curation script should perform + * + * @param commandLine The relevant CommandLine for the curation script + * @return The curation 
option to be ran, parsed from the CommandLine + */ + protected static CurationClientOptions getClientOption(CommandLine commandLine) { + if (commandLine.hasOption("h")) { + return CurationClientOptions.HELP; + } else if (commandLine.hasOption("t") || commandLine.hasOption("T")) { + return CurationClientOptions.TASK; + } else if (commandLine.hasOption("q")) { + return CurationClientOptions.QUEUE; + } + return null; + } + + /** + * This method will create all the possible Options for the {@link Curation} script. + * This will be used by {@link CurationScriptConfiguration} + * @return The options for the {@link Curation} script + */ + protected static Options constructOptions() { + Options options = new Options(); + + options.addOption("t", "task", true, "curation task name; options: " + getTaskOptions()); + options.addOption("T", "taskfile", true, "file containing curation task names"); + options.addOption("i", "id", true, + "Id (handle) of object to perform task on, or 'all' to perform on whole repository"); + options.addOption("p", "parameter", true, "a task parameter 'NAME=VALUE'"); + options.addOption("q", "queue", true, "name of task queue to process"); + options.addOption("r", "reporter", true, + "relative or absolute path to the desired report file. Use '-' to report to console. If absent, no " + + "reporting"); + options.addOption("s", "scope", true, + "transaction scope to impose: use 'object', 'curation', or 'open'. If absent, 'open' applies"); + options.addOption("v", "verbose", false, "report activity to stdout"); + options.addOption("h", "help", false, "help"); + + return options; + } + + /** + * Creates list of the taskOptions' keys from the configs of plugin.named.org.dspace.curate.CurationTask + * + * @return List of the taskOptions' keys from the configs of plugin.named.org.dspace.curate.CurationTask + */ + public static List getTaskOptions() { + if (taskOptions == null) { + ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + String[] taskConfigs = configurationService.getArrayProperty("plugin.named.org.dspace.curate.CurationTask"); + taskOptions = new ArrayList<>(); + for (String taskConfig : taskConfigs) { + taskOptions.add(StringUtils.substringAfterLast(taskConfig, "=").trim()); + } + } + return taskOptions; + } +} diff --git a/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java new file mode 100644 index 0000000000..fefb4eb768 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java @@ -0,0 +1,61 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.curate; + +import java.sql.SQLException; + +import org.apache.commons.cli.Options; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.core.Context; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * The {@link ScriptConfiguration} for the {@link Curation} script + * + * @author Maria Verdonck (Atmire) on 23/06/2020 + */ +public class CurationScriptConfiguration extends ScriptConfiguration { + + @Autowired + private AuthorizeService authorizeService; + + private Class dspaceRunnableClass; + + @Override + public Class 
getDspaceRunnableClass() { + return this.dspaceRunnableClass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + /** + * Only admin can run Curation script via the scripts and processes endpoints. + * @param context The relevant DSpace context + * @return True if currentUser is admin, otherwise false + */ + @Override + public boolean isAllowedToExecute(Context context) { + try { + return authorizeService.isAdmin(context); + } catch (SQLException e) { + throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + } + } + + @Override + public Options getOptions() { + if (options == null) { + super.options = CurationClientOptions.constructOptions(); + } + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/curate/Curator.java b/dspace-api/src/main/java/org/dspace/curate/Curator.java index 44733174df..8f12750bae 100644 --- a/dspace-api/src/main/java/org/dspace/curate/Curator.java +++ b/dspace-api/src/main/java/org/dspace/curate/Curator.java @@ -98,6 +98,7 @@ public class Curator { communityService = ContentServiceFactory.getInstance().getCommunityService(); itemService = ContentServiceFactory.getInstance().getItemService(); handleService = HandleServiceFactory.getInstance().getHandleService(); + resolver = new TaskResolver(); } /** @@ -142,10 +143,10 @@ public class Curator { // performance order currently FIFO - to be revisited perfList.add(taskName); } catch (IOException ioE) { - log.error("Task: '" + taskName + "' initialization failure: " + ioE.getMessage()); + System.out.println("Task: '" + taskName + "' initialization failure: " + ioE.getMessage()); } } else { - log.error("Task: '" + taskName + "' does not resolve"); + System.out.println("Task: '" + taskName + "' does not resolve"); } return this; } @@ -259,13 +260,6 @@ public class Curator { /** * Performs all configured tasks upon DSpace object * (Community, Collection or Item). - *

- * Note: Site-wide tasks will default to running as - * an Anonymous User unless you call the Site-wide task - * via the {@link curate(Context,String)} or - * {@link #curate(Context, DSpaceObject)} method with an - * authenticated Context object. - * * @param dso the DSpace object * @throws IOException if IO error */ @@ -325,7 +319,7 @@ public class Curator { taskQ.enqueue(queueId, new TaskQueueEntry(c.getCurrentUser().getName(), System.currentTimeMillis(), perfList, id)); } else { - log.error("curate - no TaskQueue implemented"); + System.out.println("curate - no TaskQueue implemented"); } } @@ -346,7 +340,7 @@ public class Curator { try { reporter.append(message); } catch (IOException ex) { - log.error("Task reporting failure", ex); + System.out.println("Task reporting failure: " + ex); } } @@ -552,7 +546,7 @@ public class Curator { return !suspend(statusCode); } catch (IOException ioe) { //log error & pass exception upwards - log.error("Error executing curation task '" + task.getName() + "'", ioe); + System.out.println("Error executing curation task '" + task.getName() + "'; " + ioe); throw ioe; } } @@ -568,7 +562,7 @@ public class Curator { return !suspend(statusCode); } catch (IOException ioe) { //log error & pass exception upwards - log.error("Error executing curation task '" + task.getName() + "'", ioe); + System.out.println("Error executing curation task '" + task.getName() + "'; " + ioe); throw ioe; } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/DiscoverQuery.java b/dspace-api/src/main/java/org/dspace/discovery/DiscoverQuery.java index d3efb3c626..d82779015f 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/DiscoverQuery.java +++ b/dspace-api/src/main/java/org/dspace/discovery/DiscoverQuery.java @@ -7,6 +7,9 @@ */ package org.dspace.discovery; +import static java.util.Collections.singletonList; +import static org.apache.commons.lang3.StringUtils.isNotBlank; + import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -31,7 +34,7 @@ public class DiscoverQuery { **/ private String query; private List filterQueries; - private String DSpaceObjectFilter = null; + private List dspaceObjectFilters = new ArrayList<>(); private List fieldPresentQueries; private boolean spellCheck; @@ -118,20 +121,33 @@ public class DiscoverQuery { * Sets the DSpace object filter, must be an DSpace Object type integer * can be used to only return objects from a certain DSpace Object type * - * @param DSpaceObjectFilter the DSpace object filer + * @param dspaceObjectFilter the DSpace object filter */ - public void setDSpaceObjectFilter(String DSpaceObjectFilter) { - this.DSpaceObjectFilter = DSpaceObjectFilter; + public void setDSpaceObjectFilter(String dspaceObjectFilter) { + this.dspaceObjectFilters = singletonList(dspaceObjectFilter); } /** - * Gets the DSpace object filter - * can be used to only return objects from a certain DSpace Object type + * Adds a DSpace object filter, must be an DSpace Object type integer. + * Can be used to also return objects from a certain DSpace Object type. 
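A minimal sketch (not part of this patch) of how a caller might combine the new repeatable filter with a query; it assumes the TYPE constants exposed by the indexable object classes (e.g. IndexableItem, IndexableCollection). SolrServiceImpl further below joins whatever values are added into a single OR'ed filter query on SearchUtils.RESOURCE_TYPE_FIELD:

    DiscoverQuery query = new DiscoverQuery();
    query.setQuery("dark matter");
    // Several object types may now be requested together; setDSpaceObjectFilter previously allowed only one.
    query.addDSpaceObjectFilter(IndexableItem.TYPE);
    query.addDSpaceObjectFilter(IndexableCollection.TYPE);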
* - * @return the DSpace object filer + * @param dspaceObjectFilter the DSpace object filer */ - public String getDSpaceObjectFilter() { - return DSpaceObjectFilter; + public void addDSpaceObjectFilter(String dspaceObjectFilter) { + + if (isNotBlank(dspaceObjectFilter)) { + this.dspaceObjectFilters.add(dspaceObjectFilter); + } + } + + /** + * Gets the DSpace object filters + * can be used to only return objects from certain DSpace Object types + * + * @return the DSpace object filters + */ + public List getDSpaceObjectFilters() { + return dspaceObjectFilters; } /** diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java b/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java index 2e7b00a617..4e6fa16177 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java @@ -14,7 +14,6 @@ import java.util.Optional; import java.util.UUID; import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.dspace.content.Collection; import org.dspace.content.Community; @@ -30,17 +29,18 @@ import org.dspace.discovery.indexobject.factory.IndexFactory; import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.scripts.DSpaceRunnable; -import org.springframework.beans.factory.annotation.Autowired; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.utils.DSpace; /** * Class used to reindex dspace communities/collections/items into discovery */ -public class IndexClient extends DSpaceRunnable { +public class IndexClient extends DSpaceRunnable { private Context context; - - @Autowired - private IndexingService indexer; + private IndexingService indexer = DSpaceServicesFactory.getInstance().getServiceManager() + .getServiceByName(IndexingService.class.getName(), + IndexingService.class); private IndexClientOptions indexClientOptions; @@ -144,6 +144,12 @@ public class IndexClient extends DSpaceRunnable { handler.logInfo("Done with indexing"); } + @Override + public IndexDiscoveryScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager().getServiceByName("index-discovery", + IndexDiscoveryScriptConfiguration.class); + } + public void setup() throws ParseException { try { context = new Context(Context.Mode.READ_ONLY); @@ -151,18 +157,8 @@ public class IndexClient extends DSpaceRunnable { } catch (Exception e) { throw new ParseException("Unable to create a new DSpace Context: " + e.getMessage()); } - indexClientOptions = IndexClientOptions.getIndexClientOption(commandLine); } - - /** - * Constructor for this class. This will ensure that the Options are created and set appropriately. - */ - private IndexClient() { - Options options = IndexClientOptions.constructOptions(); - this.options = options; - } - /** * Indexes the given object and all children, if applicable. 
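The IndexClient change above replaces @Autowired field injection with explicit service-manager lookups; a rough sketch of that pattern with the generics restored (service and bean names taken from the patch):

    IndexingService indexer = DSpaceServicesFactory.getInstance().getServiceManager()
            .getServiceByName(IndexingService.class.getName(), IndexingService.class);
    IndexDiscoveryScriptConfiguration configuration = new DSpace().getServiceManager()
            .getServiceByName("index-discovery", IndexDiscoveryScriptConfiguration.class);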
* diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java new file mode 100644 index 0000000000..8bf3cf2aba --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java @@ -0,0 +1,58 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.discovery; + +import java.sql.SQLException; + +import org.apache.commons.cli.Options; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.core.Context; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * The {@link ScriptConfiguration} for the {@link IndexClient} script + */ +public class IndexDiscoveryScriptConfiguration extends ScriptConfiguration { + + @Autowired + private AuthorizeService authorizeService; + + private Class dspaceRunnableClass; + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + @Override + public boolean isAllowedToExecute(Context context) { + try { + return authorizeService.isAdmin(context); + } catch (SQLException e) { + throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + } + } + + @Override + public Options getOptions() { + if (options == null) { + super.options = IndexClientOptions.constructOptions(); + } + return options; + } + + /** + * Generic setter for the dspaceRunnableClass + * @param dspaceRunnableClass The dspaceRunnableClass to be set on this IndexDiscoveryScriptConfiguration + */ + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java b/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java index 43ea9eefb2..195c9cd6fc 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java @@ -8,6 +8,7 @@ package org.dspace.discovery; import java.util.HashSet; +import java.util.Optional; import java.util.Set; import org.apache.logging.log4j.Logger; @@ -15,6 +16,7 @@ import org.dspace.content.Bundle; import org.dspace.content.DSpaceObject; import org.dspace.core.Constants; import org.dspace.core.Context; +import org.dspace.discovery.indexobject.factory.IndexFactory; import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory; import org.dspace.event.Consumer; import org.dspace.event.Event; @@ -67,7 +69,7 @@ public class IndexEventConsumer implements Consumer { int st = event.getSubjectType(); if (!(st == Constants.ITEM || st == Constants.BUNDLE - || st == Constants.COLLECTION || st == Constants.COMMUNITY)) { + || st == Constants.COLLECTION || st == Constants.COMMUNITY || st == Constants.SITE)) { log .warn("IndexConsumer should not have been given this kind of Subject in an event, skipping: " + event.toString()); @@ -104,10 +106,28 @@ public class IndexEventConsumer implements Consumer { case Event.MODIFY: case Event.MODIFY_METADATA: if (subject == null) { - log.warn(event.getEventTypeAsString() + " event, could not get object for " + if (st == Constants.SITE) { + // Update the indexable 
objects of type in event.detail of objects with ids in event.identifiers + for (String id : event.getIdentifiers()) { + IndexFactory indexableObjectService = IndexObjectFactoryFactory.getInstance(). + getIndexFactoryByType(event.getDetail()); + Optional indexableObject = Optional.empty(); + indexableObject = indexableObjectService.findIndexableObject(ctx, id); + if (indexableObject.isPresent()) { + log.debug("consume() adding event to update queue: " + event.toString()); + objectsToUpdate + .addAll(indexObjectServiceFactory + .getIndexableObjects(ctx, indexableObject.get().getIndexedObject())); + } else { + log.warn("Cannot resolve " + id); + } + } + } else { + log.warn(event.getEventTypeAsString() + " event, could not get object for " + event.getSubjectTypeAsString() + " id=" + event.getSubjectID() + ", perhaps it has been deleted."); + } } else { log.debug("consume() adding event to update queue: " + event.toString()); objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, subject)); diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java index 94361b7cf9..88e32d0aaf 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java @@ -7,6 +7,8 @@ */ package org.dspace.discovery; +import static java.util.stream.Collectors.joining; + import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; @@ -24,7 +26,6 @@ import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Optional; -import java.util.Set; import java.util.TimeZone; import java.util.UUID; @@ -69,7 +70,6 @@ import org.dspace.discovery.indexobject.IndexableCommunity; import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.discovery.indexobject.factory.IndexFactory; import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory; -import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.GroupService; @@ -100,16 +100,6 @@ import org.springframework.stereotype.Service; @Service public class SolrServiceImpl implements SearchService, IndexingService { - /** - * The name of the discover configuration used to search for workflow tasks in the mydspace - */ - public static final String DISCOVER_WORKFLOW_CONFIGURATION_NAME = "workflow"; - - /** - * The name of the discover configuration used to search for inprogress submission in the mydspace - */ - public static final String DISCOVER_WORKSPACE_CONFIGURATION_NAME = "workspace"; - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SolrServiceImpl.class); @Autowired @@ -763,8 +753,13 @@ public class SolrServiceImpl implements SearchService, IndexingService { String filterQuery = discoveryQuery.getFilterQueries().get(i); solrQuery.addFilterQuery(filterQuery); } - if (discoveryQuery.getDSpaceObjectFilter() != null) { - solrQuery.addFilterQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":" + discoveryQuery.getDSpaceObjectFilter()); + if (discoveryQuery.getDSpaceObjectFilters() != null) { + solrQuery.addFilterQuery( + discoveryQuery.getDSpaceObjectFilters() + .stream() + .map(filter -> SearchUtils.RESOURCE_TYPE_FIELD + ":" + filter) + .collect(joining(" OR ")) + ); } for (int i = 0; i < discoveryQuery.getFieldPresentQueries().size(); i++) { @@ -848,46 +843,9 @@ public class SolrServiceImpl implements 
SearchService, IndexingService { } - boolean isWorkspace = StringUtils.startsWith(discoveryQuery.getDiscoveryConfigurationName(), - DISCOVER_WORKSPACE_CONFIGURATION_NAME); - boolean isWorkflow = StringUtils.startsWith(discoveryQuery.getDiscoveryConfigurationName(), - DISCOVER_WORKFLOW_CONFIGURATION_NAME); - EPerson currentUser = context.getCurrentUser(); - - // extra security check to avoid the possibility that an anonymous user - // get access to workspace or workflow - if (currentUser == null && (isWorkflow || isWorkspace)) { - throw new IllegalStateException("An anonymous user cannot perform a workspace or workflow search"); - } - if (isWorkspace) { - // insert filter by submitter - solrQuery - .addFilterQuery("submitter_authority:(" + currentUser.getID() + ")"); - } else if (isWorkflow) { - // Retrieve all the groups the current user is a member of ! - Set groups; - try { - groups = groupService.allMemberGroupsSet(context, currentUser); - } catch (SQLException e) { - throw new org.dspace.discovery.SearchServiceException(e.getMessage(), e); - } - - // insert filter by controllers - StringBuilder controllerQuery = new StringBuilder(); - controllerQuery.append("taskfor:(e" + currentUser.getID()); - for (Group group : groups) { - controllerQuery.append(" OR g").append(group.getID()); - } - controllerQuery.append(")"); - solrQuery.addFilterQuery(controllerQuery.toString()); - } - - //Add any configured search plugins ! - List solrServiceSearchPlugins = DSpaceServicesFactory.getInstance().getServiceManager() - .getServicesByType( - SolrServiceSearchPlugin - .class); + List solrServiceSearchPlugins = DSpaceServicesFactory.getInstance() + .getServiceManager().getServicesByType(SolrServiceSearchPlugin.class); for (SolrServiceSearchPlugin searchPlugin : solrServiceSearchPlugins) { searchPlugin.additionalSearchParameters(context, discoveryQuery, solrQuery); } diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceIndexCollectionSubmittersPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceIndexCollectionSubmittersPlugin.java new file mode 100644 index 0000000000..ebcaab78af --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceIndexCollectionSubmittersPlugin.java @@ -0,0 +1,76 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.discovery; + +import java.sql.SQLException; +import java.util.List; + +import org.apache.logging.log4j.Logger; +import org.apache.solr.common.SolrInputDocument; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.core.LogManager; +import org.dspace.discovery.indexobject.IndexableCollection; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * The purpose of this plugin is to index all ADD type resource policies related to collections. 
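For concreteness, a hedged sketch (hypothetical UUIDs, not part of the patch) of what the plugin described above writes into the Solr document for a collection: one "submit" value per group or eperson that is allowed to add items to it:

    SolrInputDocument document = new SolrInputDocument();
    document.addField("submit", "g11111111-1111-1111-1111-111111111111"); // administrators group of a parent community
    document.addField("submit", "g22222222-2222-2222-2222-222222222222"); // group named in an ADD resource policy
    document.addField("submit", "e33333333-3333-3333-3333-333333333333"); // eperson named in an ADD resource policy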
+ * + * @author Mykhaylo Boychuk (at 4science.it) + */ +public class SolrServiceIndexCollectionSubmittersPlugin implements SolrServiceIndexPlugin { + + private static final Logger log = org.apache.logging.log4j.LogManager + .getLogger(SolrServiceIndexCollectionSubmittersPlugin.class); + + @Autowired(required = true) + protected AuthorizeService authorizeService; + + @Override + public void additionalIndex(Context context, IndexableObject idxObj, SolrInputDocument document) { + if (idxObj instanceof IndexableCollection) { + Collection col = ((IndexableCollection) idxObj).getIndexedObject(); + if (col != null) { + try { + String fieldValue = null; + Community parent = (Community) ContentServiceFactory.getInstance().getDSpaceObjectService(col) + .getParentObject(context, col); + while (parent != null) { + if (parent.getAdministrators() != null) { + fieldValue = "g" + parent.getAdministrators().getID(); + document.addField("submit", fieldValue); + } + parent = (Community) ContentServiceFactory.getInstance().getDSpaceObjectService(parent) + .getParentObject(context, parent); + } + List policies = authorizeService.getPoliciesActionFilter(context, col, + Constants.ADD); + for (ResourcePolicy resourcePolicy : policies) { + if (resourcePolicy.getGroup() != null) { + fieldValue = "g" + resourcePolicy.getGroup().getID(); + } else { + fieldValue = "e" + resourcePolicy.getEPerson().getID(); + + } + document.addField("submit", fieldValue); + context.uncacheEntity(resourcePolicy); + } + } catch (SQLException e) { + log.error(LogManager.getHeader(context, "Error while indexing resource policies", + "Collection: (id " + col.getID() + " type " + col.getName() + ")" )); + } + } + } + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceMetadataBrowseIndexingPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceMetadataBrowseIndexingPlugin.java index 187c6b0600..2b2be66384 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceMetadataBrowseIndexingPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceMetadataBrowseIndexingPlugin.java @@ -17,6 +17,7 @@ import org.apache.logging.log4j.Logger; import org.apache.solr.common.SolrInputDocument; import org.dspace.browse.BrowseException; import org.dspace.browse.BrowseIndex; +import org.dspace.content.Collection; import org.dspace.content.Item; import org.dspace.content.MetadataValue; import org.dspace.content.authority.service.ChoiceAuthorityService; @@ -63,7 +64,7 @@ public class SolrServiceMetadataBrowseIndexingPlugin implements SolrServiceIndex return; } Item item = ((IndexableItem) indexableObject).getIndexedObject(); - + Collection collection = item.getOwningCollection(); // Get the currently configured browse indexes BrowseIndex[] bis; try { @@ -175,7 +176,7 @@ public class SolrServiceMetadataBrowseIndexingPlugin implements SolrServiceIndex true); if (!ignorePrefered) { preferedLabel = choiceAuthorityService - .getLabel(values.get(x), values.get(x).getLanguage()); + .getLabel(values.get(x), collection, values.get(x).getLanguage()); } List variants = null; @@ -195,7 +196,7 @@ public class SolrServiceMetadataBrowseIndexingPlugin implements SolrServiceIndex if (!ignoreVariants) { variants = choiceAuthorityService .getVariants( - values.get(x)); + values.get(x), collection); } if (StringUtils diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceResourceRestrictionPlugin.java 
b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceResourceRestrictionPlugin.java index 659a3d77d7..add0fe7589 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceResourceRestrictionPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceResourceRestrictionPlugin.java @@ -11,7 +11,6 @@ import java.sql.SQLException; import java.util.List; import java.util.Set; -import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.common.SolrInputDocument; @@ -21,6 +20,7 @@ import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.DSpaceObject; +import org.dspace.content.InProgressSubmission; import org.dspace.content.Item; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; @@ -28,11 +28,16 @@ import org.dspace.content.service.CommunityService; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.LogManager; +import org.dspace.discovery.indexobject.IndexableClaimedTask; import org.dspace.discovery.indexobject.IndexableDSpaceObject; +import org.dspace.discovery.indexobject.IndexableInProgressSubmission; +import org.dspace.discovery.indexobject.IndexablePoolTask; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.xmlworkflow.storedcomponents.ClaimedTask; +import org.dspace.xmlworkflow.storedcomponents.PoolTask; import org.springframework.beans.factory.annotation.Autowired; /** @@ -61,8 +66,21 @@ public class SolrServiceResourceRestrictionPlugin implements SolrServiceIndexPlu @Override public void additionalIndex(Context context, IndexableObject idxObj, SolrInputDocument document) { + DSpaceObject dso = null; if (idxObj instanceof IndexableDSpaceObject) { - DSpaceObject dso = ((IndexableDSpaceObject) idxObj).getIndexedObject(); + dso = ((IndexableDSpaceObject) idxObj).getIndexedObject(); + } else if (idxObj instanceof IndexableInProgressSubmission) { + final InProgressSubmission inProgressSubmission + = ((IndexableInProgressSubmission) idxObj).getIndexedObject(); + dso = inProgressSubmission.getItem(); + } else if (idxObj instanceof IndexablePoolTask) { + final PoolTask poolTask = ((IndexablePoolTask) idxObj).getIndexedObject(); + dso = poolTask.getWorkflowItem().getItem(); + } else if (idxObj instanceof IndexableClaimedTask) { + final ClaimedTask claimedTask = ((IndexableClaimedTask) idxObj).getIndexedObject(); + dso = claimedTask.getWorkflowItem().getItem(); + } + if (dso != null) { try { List policies = authorizeService.getPoliciesActionFilter(context, dso, Constants.READ); for (ResourcePolicy resourcePolicy : policies) { @@ -106,7 +124,8 @@ public class SolrServiceResourceRestrictionPlugin implements SolrServiceIndexPlu } } catch (SQLException e) { log.error(LogManager.getHeader(context, "Error while indexing resource policies", - "DSpace object: (id " + dso.getID() + " type " + dso.getType() + ")")); + "DSpace object: (id " + dso.getID() + " type " + dso.getType() + ")" + )); } } } @@ -114,13 +133,6 @@ public class SolrServiceResourceRestrictionPlugin implements SolrServiceIndexPlu @Override public void additionalSearchParameters(Context context, DiscoverQuery discoveryQuery, SolrQuery solrQuery) { try { - // skip workspace and 
workflow queries as security for it them is builtin in the SolrServiceImpl - if (StringUtils.startsWith(discoveryQuery.getDiscoveryConfigurationName(), - SolrServiceImpl.DISCOVER_WORKSPACE_CONFIGURATION_NAME) - || StringUtils.startsWith(discoveryQuery.getDiscoveryConfigurationName(), - SolrServiceImpl.DISCOVER_WORKFLOW_CONFIGURATION_NAME)) { - return; - } if (!authorizeService.isAdmin(context)) { StringBuilder resourceQuery = new StringBuilder(); //Always add the anonymous group id to the query diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceSearchPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceSearchPlugin.java index 88cc5edd99..d7994fc7a3 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceSearchPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceSearchPlugin.java @@ -19,5 +19,14 @@ import org.dspace.core.Context; */ public interface SolrServiceSearchPlugin { - public void additionalSearchParameters(Context context, DiscoverQuery discoveryQuery, SolrQuery solrQuery); + /** + * Edits the solr query before it is sent to solr by adding additional parameters to it. + * + * @param context The DSpace Context object. + * @param discoveryQuery The discovery query object on which the solr query is based. + * @param solrQuery The query that will be sent to solr and which may be edited by this plugin. + * @throws SearchServiceException Any checked exception that might happen in this plugin + */ + public void additionalSearchParameters(Context context, DiscoverQuery discoveryQuery, SolrQuery solrQuery) + throws SearchServiceException; } diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceWorkspaceWorkflowRestrictionPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceWorkspaceWorkflowRestrictionPlugin.java new file mode 100644 index 0000000000..fd05be1cb5 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceWorkspaceWorkflowRestrictionPlugin.java @@ -0,0 +1,101 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.discovery; + +import java.sql.SQLException; +import java.util.Set; + +import org.apache.commons.lang3.StringUtils; +import org.apache.solr.client.solrj.SolrQuery; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.eperson.service.GroupService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Plugin to restrict or grant access to workspace and workflow items + * based on the discovery configuration used. 
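+ * For "workspace" configurations it adds a {@code submitter_authority:(<user-uuid>)} filter so that users only see
+ * their own submissions; for "workflow" configurations it adds a {@code taskfor:(e<user-uuid> OR g<group-uuid> ...)}
+ * filter, unless the search uses the "workflowAdmin" configuration and the current user is an administrator.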
+ */ +public class SolrServiceWorkspaceWorkflowRestrictionPlugin implements SolrServiceSearchPlugin { + + /** + * The name of the discover configuration used to search for inprogress submission in the mydspace + */ + public static final String DISCOVER_WORKSPACE_CONFIGURATION_NAME = "workspace"; + + /** + * The name of the discover configuration used to search for workflow tasks in the mydspace + */ + public static final String DISCOVER_WORKFLOW_CONFIGURATION_NAME = "workflow"; + + /** + * The name of the discover configuration used by administrators to search for workflow tasks + */ + public static final String DISCOVER_WORKFLOW_ADMIN_CONFIGURATION_NAME = "workflowAdmin"; + + @Autowired(required = true) + protected GroupService groupService; + + @Autowired(required = true) + protected AuthorizeService authorizeService; + + @Override + public void additionalSearchParameters( + Context context, DiscoverQuery discoveryQuery, SolrQuery solrQuery + ) throws SearchServiceException { + boolean isWorkspace = StringUtils.startsWith( + discoveryQuery.getDiscoveryConfigurationName(), + DISCOVER_WORKSPACE_CONFIGURATION_NAME + ); + boolean isWorkflow = StringUtils.startsWith( + discoveryQuery.getDiscoveryConfigurationName(), + DISCOVER_WORKFLOW_CONFIGURATION_NAME + ); + boolean isWorkflowAdmin = isAdmin(context) + && DISCOVER_WORKFLOW_ADMIN_CONFIGURATION_NAME.equals(discoveryQuery.getDiscoveryConfigurationName()); + EPerson currentUser = context.getCurrentUser(); + + // extra security check to avoid the possibility that an anonymous user + // get access to workspace or workflow + if (currentUser == null && (isWorkflow || isWorkspace)) { + throw new IllegalStateException( + "An anonymous user cannot perform a workspace or workflow search"); + } + if (isWorkspace) { + // insert filter by submitter + solrQuery.addFilterQuery("submitter_authority:(" + currentUser.getID() + ")"); + } else if (isWorkflow && !isWorkflowAdmin) { + // Retrieve all the groups the current user is a member of ! 
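+ // (the full set of memberships, including parent groups of the user's direct groups)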
+ Set groups; + try { + groups = groupService.allMemberGroupsSet(context, currentUser); + } catch (SQLException e) { + throw new SearchServiceException(e.getMessage(), e); + } + + // insert filter by controllers + StringBuilder controllerQuery = new StringBuilder(); + controllerQuery.append("taskfor:(e").append(currentUser.getID()); + for (Group group : groups) { + controllerQuery.append(" OR g").append(group.getID()); + } + controllerQuery.append(")"); + solrQuery.addFilterQuery(controllerQuery.toString()); + } + } + + private boolean isAdmin(Context context) throws SearchServiceException { + try { + return authorizeService.isAdmin(context); + } catch (SQLException e) { + throw new SearchServiceException(e.getMessage(), e); + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/AbstractIndexableObject.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/AbstractIndexableObject.java new file mode 100644 index 0000000000..90aafcbd30 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/AbstractIndexableObject.java @@ -0,0 +1,43 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.discovery.indexobject; + +import java.io.Serializable; + +import org.dspace.core.ReloadableEntity; +import org.dspace.discovery.IndexableObject; + +/** + * This class exists in order to provide a default implementation for the equals and hashCode methods. + * Since IndexableObjects can be made multiple times for the same underlying object, we needed a more finetuned + * equals and hashcode methods. We're simply checking that the underlying objects are equal and generating the hashcode + * for the underlying object. 
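+ * (For example, two IndexableItem instances created at different times for the same Item compare as equal, while
+ * indexable wrappers around different underlying objects do not.)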
This way, we'll always get a proper result when calling equals or hashCode on an + * IndexableObject because it'll depend on the underlying object. + * @param <T> Refers to the underlying entity that is linked to this object + * @param <PK> The type of ID that this entity uses + */ +public abstract class AbstractIndexableObject<T extends ReloadableEntity<PK>, PK extends Serializable> + implements IndexableObject<T, PK> { + + @Override + public boolean equals(Object obj) { + //Two IndexableObjects of the same DSpaceObject are considered equal + if (!(obj instanceof AbstractIndexableObject)) { + return false; + } + IndexableDSpaceObject other = (IndexableDSpaceObject) obj; + return other.getIndexedObject().equals(getIndexedObject()); + } + + @Override + public int hashCode() { + //Two IndexableObjects of the same DSpaceObject are considered equal + return getIndexedObject().hashCode(); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexFactoryImpl.java index ca1423e593..2e4eb67723 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexFactoryImpl.java @@ -12,6 +12,7 @@ import java.sql.SQLException; import java.util.Date; import java.util.List; +import org.apache.commons.collections4.ListUtils; import org.apache.commons.lang3.StringUtils; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException; @@ -56,7 +57,7 @@ public abstract class IndexFactoryImpl implements doc.addField(SearchUtils.RESOURCE_ID_FIELD, indexableObject.getID().toString()); //Do any additional indexing, depends on the plugins - for (SolrServiceIndexPlugin solrServiceIndexPlugin : solrServiceIndexPlugins) { + for (SolrServiceIndexPlugin solrServiceIndexPlugin : ListUtils.emptyIfNull(solrServiceIndexPlugins)) { solrServiceIndexPlugin.additionalIndex(context, indexableObject, doc); } @@ -190,4 +191,4 @@ public abstract class IndexFactoryImpl implements public void deleteAll() throws IOException, SolrServerException { solrSearchCore.getSolr().deleteByQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":" + getType()); } -} \ No newline at end of file +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexableClaimedTask.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexableClaimedTask.java index 3810b6803f..b96899b618 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexableClaimedTask.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexableClaimedTask.java @@ -7,7 +7,6 @@ */ package org.dspace.discovery.indexobject; -import org.dspace.discovery.IndexableObject; import org.dspace.xmlworkflow.storedcomponents.ClaimedTask; /** @@ -15,7 +14,7 @@ import org.dspace.xmlworkflow.storedcomponents.ClaimedTask; * * @author Kevin Van de Velde (kevin at atmire dot com) */ -public class IndexableClaimedTask implements IndexableObject<ClaimedTask, Integer> { +public class IndexableClaimedTask extends AbstractIndexableObject<ClaimedTask, Integer> { private ClaimedTask claimedTask; public static final String TYPE = ClaimedTask.class.getSimpleName(); diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexableDSpaceObject.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexableDSpaceObject.java index 7ad82b1a95..7abc11eb7f 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexableDSpaceObject.java +++
b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexableDSpaceObject.java @@ -10,7 +10,6 @@ package org.dspace.discovery.indexobject; import java.util.UUID; import org.dspace.content.DSpaceObject; -import org.dspace.discovery.IndexableObject; /** * DSpaceObject implementation for the IndexableObject, contains methods used by all DSpaceObject methods @@ -18,7 +17,7 @@ import org.dspace.discovery.IndexableObject; * * @author Kevin Van de Velde (kevin at atmire dot com) */ -public abstract class IndexableDSpaceObject implements IndexableObject { +public abstract class IndexableDSpaceObject extends AbstractIndexableObject { private T dso; @@ -40,4 +39,6 @@ public abstract class IndexableDSpaceObject implements I public UUID getID() { return dso.getID(); } -} \ No newline at end of file + + +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexableInProgressSubmission.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexableInProgressSubmission.java index cfa27ff814..d6dd785801 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexableInProgressSubmission.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexableInProgressSubmission.java @@ -8,14 +8,13 @@ package org.dspace.discovery.indexobject; import org.dspace.content.InProgressSubmission; -import org.dspace.discovery.IndexableObject; /** * InProgressSubmission implementation for the IndexableObject * @author Kevin Van de Velde (kevin at atmire dot com) */ public abstract class IndexableInProgressSubmission - implements IndexableObject { + extends AbstractIndexableObject { protected T inProgressSubmission; diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexableMetadataField.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexableMetadataField.java new file mode 100644 index 0000000000..70e63d19ba --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexableMetadataField.java @@ -0,0 +1,51 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.discovery.indexobject; + +import org.dspace.content.MetadataField; +import org.dspace.discovery.IndexableObject; + +/** + * {@link MetadataField} implementation for the {@link IndexableObject} + * + * @author Maria Verdonck (Atmire) on 14/07/2020 + */ +public class IndexableMetadataField extends AbstractIndexableObject { + + private MetadataField metadataField; + public static final String TYPE = MetadataField.class.getSimpleName(); + + public IndexableMetadataField(MetadataField metadataField) { + this.metadataField = metadataField; + } + + @Override + public String getType() { + return TYPE; + } + + @Override + public Integer getID() { + return this.metadataField.getID(); + } + + @Override + public MetadataField getIndexedObject() { + return this.metadataField; + } + + @Override + public void setIndexedObject(MetadataField metadataField) { + this.metadataField = metadataField; + } + + @Override + public String getTypeText() { + return TYPE.toUpperCase(); + } +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexablePoolTask.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexablePoolTask.java index 6eea1f0ebb..39fdb8b8b5 100644 --- 
a/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexablePoolTask.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexablePoolTask.java @@ -7,14 +7,13 @@ */ package org.dspace.discovery.indexobject; -import org.dspace.discovery.IndexableObject; import org.dspace.xmlworkflow.storedcomponents.PoolTask; /** * PoolTask implementation for the IndexableObject * @author Kevin Van de Velde (kevin at atmire dot com) */ -public class IndexablePoolTask implements IndexableObject { +public class IndexablePoolTask extends AbstractIndexableObject { public static final String TYPE = PoolTask.class.getSimpleName(); diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java index 7f98131566..2a1008aaf9 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java @@ -173,6 +173,8 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl discoveryConfigurations) throws SQLException, IOException { + // use the item service to retrieve the owning collection also for inprogress submission + Collection collection = (Collection) itemService.getParentObject(context, item); //Keep a list of our sort values which we added, sort values can only be added once List sortFieldsAdded = new ArrayList<>(); Map> searchFilters = null; @@ -359,7 +361,7 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl + implements MetadataFieldIndexFactory { + + public static final String SCHEMA_FIELD_NAME = "schema"; + public static final String ELEMENT_FIELD_NAME = "element"; + public static final String QUALIFIER_FIELD_NAME = "qualifier"; + public static final String FIELD_NAME_VARIATIONS = "fieldName"; + + protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + + @Override + public SolrInputDocument buildDocument(Context context, IndexableMetadataField indexableObject) throws SQLException, + IOException { + // Add the ID's, types and call the SolrServiceIndexPlugins + final SolrInputDocument doc = super.buildDocument(context, indexableObject); + final MetadataField metadataField = indexableObject.getIndexedObject(); + // add schema, element, qualifier and full fieldName + addFacetIndex(doc, SCHEMA_FIELD_NAME, metadataField.getMetadataSchema().getName(), + metadataField.getMetadataSchema().getName()); + addFacetIndex(doc, ELEMENT_FIELD_NAME, metadataField.getElement(), metadataField.getElement()); + String fieldName = metadataField.toString().replace('_', '.'); + addFacetIndex(doc, FIELD_NAME_VARIATIONS, fieldName, fieldName); + if (StringUtils.isNotBlank(metadataField.getQualifier())) { + addFacetIndex(doc, QUALIFIER_FIELD_NAME, metadataField.getQualifier(), metadataField.getQualifier()); + addFacetIndex(doc, FIELD_NAME_VARIATIONS, fieldName, + metadataField.getElement() + "." 
+ metadataField.getQualifier()); + addFacetIndex(doc, FIELD_NAME_VARIATIONS, metadataField.getQualifier(), metadataField.getQualifier()); + } else { + addFacetIndex(doc, FIELD_NAME_VARIATIONS, metadataField.getElement(), metadataField.getElement()); + } + addNamedResourceTypeIndex(doc, indexableObject.getTypeText()); + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + // add read permission on doc for anonymous group + doc.addField("read", "g" + anonymousGroup.getID()); + return doc; + } + + @Autowired + private MetadataFieldService metadataFieldService; + + @Override + public Iterator<IndexableMetadataField> findAll(Context context) throws SQLException { + final Iterator<MetadataField> metadataFields = metadataFieldService.findAll(context).iterator(); + return new Iterator<>() { + @Override + public boolean hasNext() { + return metadataFields.hasNext(); + } + + @Override + public IndexableMetadataField next() { + return new IndexableMetadataField(metadataFields.next()); + } + }; + } + + @Override + public String getType() { + return IndexableMetadataField.TYPE; + } + + @Override + public Optional<IndexableMetadataField> findIndexableObject(Context context, String id) throws SQLException { + final MetadataField metadataField = metadataFieldService.find(context, Integer.parseInt(id)); + return metadataField == null ? Optional.empty() : Optional.of(new IndexableMetadataField(metadataField)); + } + + @Override + public boolean supports(Object object) { + return object instanceof MetadataField; + } + + @Override + public List<IndexableMetadataField> getIndexableObjects(Context context, MetadataField object) { + return Arrays.asList(new IndexableMetadataField(object)); + } +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/factory/MetadataFieldIndexFactory.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/factory/MetadataFieldIndexFactory.java new file mode 100644 index 0000000000..976cc4511c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/factory/MetadataFieldIndexFactory.java @@ -0,0 +1,19 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.discovery.indexobject.factory; + +import org.dspace.content.MetadataField; +import org.dspace.discovery.indexobject.IndexableMetadataField; + +/** + * Factory interface for indexing/retrieving {@link org.dspace.content.MetadataField} items in the search core + * + * @author Maria Verdonck (Atmire) on 14/07/2020 + */ +public interface MetadataFieldIndexFactory extends IndexFactory<IndexableMetadataField, MetadataField> { +} diff --git a/dspace-api/src/main/java/org/dspace/disseminate/service/CitationDocumentService.java b/dspace-api/src/main/java/org/dspace/disseminate/service/CitationDocumentService.java index d6c7935a86..4a59de3f5f 100644 --- a/dspace-api/src/main/java/org/dspace/disseminate/service/CitationDocumentService.java +++ b/dspace-api/src/main/java/org/dspace/disseminate/service/CitationDocumentService.java @@ -38,7 +38,7 @@ public interface CitationDocumentService { * Citation enabled globally (all citable bitstreams will get "watermarked") modules/disseminate-citation: * enable_globally * OR - * The container is this object is whitelist enabled. + * The container of this object is "allow list" enabled.
* - community: modules/disseminate-citation: enabled_communities * - collection: modules/disseminate-citation: enabled_collections * AND diff --git a/dspace-api/src/main/java/org/dspace/eperson/AccountServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/AccountServiceImpl.java index e00a9568e3..40da31a0f9 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/AccountServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/AccountServiceImpl.java @@ -12,6 +12,7 @@ import java.sql.SQLException; import java.util.Locale; import javax.mail.MessagingException; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.core.ConfigurationManager; @@ -22,6 +23,7 @@ import org.dspace.core.Utils; import org.dspace.eperson.service.AccountService; import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.RegistrationDataService; +import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; /** @@ -47,6 +49,8 @@ public class AccountServiceImpl implements AccountService { protected EPersonService ePersonService; @Autowired(required = true) protected RegistrationDataService registrationDataService; + @Autowired + private ConfigurationService configurationService; protected AccountServiceImpl() { @@ -67,6 +71,9 @@ public class AccountServiceImpl implements AccountService { public void sendRegistrationInfo(Context context, String email) throws SQLException, IOException, MessagingException, AuthorizeException { + if (!configurationService.getBooleanProperty("user.registration", true)) { + throw new IllegalStateException("The user.registration parameter was set to false"); + } sendInfo(context, email, true, true); } @@ -155,6 +162,14 @@ public class AccountServiceImpl implements AccountService { registrationDataService.deleteByToken(context, token); } + @Override + public boolean verifyPasswordStructure(String password) { + if (StringUtils.length(password) < 6) { + return false; + } + return true; + } + /** * THIS IS AN INTERNAL METHOD. THE SEND PARAMETER ALLOWS IT TO BE USED FOR * TESTING PURPOSES. @@ -233,8 +248,8 @@ public class AccountServiceImpl implements AccountService { // Note change from "key=" to "token=" String specialLink = new StringBuffer().append(base).append( base.endsWith("/") ? "" : "/").append( - isRegister ? "register" : "forgot").append("?") - .append("token=").append(rd.getToken()) + isRegister ? "register" : "forgot").append("/") + .append(rd.getToken()) .toString(); Locale locale = context.getCurrentLocale(); Email bean = Email.getEmail(I18nUtil.getEmailFilename(locale, isRegister ? 
"register" diff --git a/dspace-api/src/main/java/org/dspace/eperson/EPerson.java b/dspace-api/src/main/java/org/dspace/eperson/EPerson.java index fc2950ee2b..3c48a5244a 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/EPerson.java +++ b/dspace-api/src/main/java/org/dspace/eperson/EPerson.java @@ -141,7 +141,7 @@ public class EPerson extends DSpaceObject implements DSpaceObjectLegacySupport { return false; } final EPerson other = (EPerson) obj; - if (this.getID() != other.getID()) { + if (!this.getID().equals(other.getID())) { return false; } if (!StringUtils.equals(this.getEmail(), other.getEmail())) { diff --git a/dspace-api/src/main/java/org/dspace/eperson/EPersonCLITool.java b/dspace-api/src/main/java/org/dspace/eperson/EPersonCLITool.java index 850cb992bc..547044d460 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/EPersonCLITool.java +++ b/dspace-api/src/main/java/org/dspace/eperson/EPersonCLITool.java @@ -7,8 +7,11 @@ */ package org.dspace.eperson; +import java.io.BufferedReader; import java.io.IOException; +import java.io.InputStreamReader; import java.sql.SQLException; +import java.util.List; import java.util.Locale; import org.apache.commons.cli.CommandLine; @@ -196,7 +199,6 @@ public class EPersonCLITool { try { ePersonService.update(context, eperson); - context.complete(); System.out.printf("Created EPerson %s\n", eperson.getID().toString()); } catch (SQLException ex) { context.abort(); @@ -259,16 +261,26 @@ public class EPersonCLITool { } try { - ePersonService.delete(context, eperson); - context.complete(); - System.out.printf("Deleted EPerson %s\n", eperson.getID().toString()); - } catch (SQLException ex) { - System.err.println(ex.getMessage()); - return 1; - } catch (AuthorizeException ex) { - System.err.println(ex.getMessage()); - return 1; - } catch (IOException ex) { + List tableList = ePersonService.getDeleteConstraints(context, eperson); + if (!tableList.isEmpty()) { + System.out.printf("The EPerson with ID: %s is referenced by the following database tables:%n", + eperson.getID().toString()); + tableList.forEach((s) -> { + System.out.println(s); + }); + } + System.out.printf("Are you sure you want to delete this EPerson with ID: %s? 
(y or n): ", + eperson.getID().toString()); + BufferedReader input = new BufferedReader(new InputStreamReader(System.in)); + System.out.flush(); + String s = input.readLine(); + if (s != null && s.trim().toLowerCase().startsWith("y")) { + ePersonService.delete(context, eperson); + System.out.printf("%nDeleted EPerson with ID: %s", eperson.getID().toString()); + } else { + System.out.printf("%nAbort Deletion of EPerson with ID: %s %n", eperson.getID().toString()); + } + } catch (SQLException | AuthorizeException | IOException ex) { System.err.println(ex.getMessage()); return 1; } @@ -373,7 +385,6 @@ public class EPersonCLITool { if (modified) { try { ePersonService.update(context, eperson); - context.complete(); System.out.printf("Modified EPerson %s\n", eperson.getID().toString()); } catch (SQLException ex) { context.abort(); diff --git a/dspace-api/src/main/java/org/dspace/eperson/EPersonDeletionException.java b/dspace-api/src/main/java/org/dspace/eperson/EPersonDeletionException.java index 5429f3d102..b86d5f5e8e 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/EPersonDeletionException.java +++ b/dspace-api/src/main/java/org/dspace/eperson/EPersonDeletionException.java @@ -9,6 +9,8 @@ package org.dspace.eperson; import java.util.List; +import org.apache.commons.lang3.ArrayUtils; + /** * Exception indicating that an EPerson may not be deleted due to the presence * of the EPerson's ID in certain tables @@ -33,7 +35,10 @@ public class EPersonDeletionException extends Exception { * deleted if it exists in these tables. */ public EPersonDeletionException(List tableList) { - super(); + // this may not be the most beautiful way to print the tablenames as part or the error message. + // but it has to be a one liner, as the super() call must be the first statement in the constructor. 
+ super("Cannot delete EPerson as it is referenced by the following database tables: " + + ArrayUtils.toString(tableList.toArray())); myTableList = tableList; } diff --git a/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java index f173250cf3..ab9f7831c7 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java @@ -7,10 +7,13 @@ */ package org.dspace.eperson; +import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.Date; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; @@ -21,26 +24,56 @@ import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.factory.AuthorizeServiceFactory; import org.dspace.authorize.service.AuthorizeService; +import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.content.DSpaceObjectServiceImpl; import org.dspace.content.Item; import org.dspace.content.MetadataField; +import org.dspace.content.WorkspaceItem; +import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.ItemService; +import org.dspace.content.service.WorkspaceItemService; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.LogManager; import org.dspace.core.Utils; import org.dspace.eperson.dao.EPersonDAO; import org.dspace.eperson.service.EPersonService; +import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.SubscribeService; import org.dspace.event.Event; +import org.dspace.versioning.Version; +import org.dspace.versioning.VersionHistory; +import org.dspace.versioning.dao.VersionDAO; +import org.dspace.versioning.factory.VersionServiceFactory; +import org.dspace.versioning.service.VersionHistoryService; +import org.dspace.versioning.service.VersioningService; import org.dspace.workflow.WorkflowService; import org.dspace.workflow.factory.WorkflowServiceFactory; +import org.dspace.workflowbasic.BasicWorkflowItem; +import org.dspace.workflowbasic.BasicWorkflowServiceImpl; +import org.dspace.workflowbasic.factory.BasicWorkflowServiceFactory; +import org.dspace.workflowbasic.service.BasicWorkflowItemService; +import org.dspace.workflowbasic.service.BasicWorkflowService; +import org.dspace.workflowbasic.service.TaskListItemService; +import org.dspace.xmlworkflow.WorkflowConfigurationException; +import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory; +import org.dspace.xmlworkflow.service.WorkflowRequirementsService; +import org.dspace.xmlworkflow.service.XmlWorkflowService; +import org.dspace.xmlworkflow.storedcomponents.ClaimedTask; +import org.dspace.xmlworkflow.storedcomponents.CollectionRole; +import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; +import org.dspace.xmlworkflow.storedcomponents.service.ClaimedTaskService; +import org.dspace.xmlworkflow.storedcomponents.service.CollectionRoleService; +import org.dspace.xmlworkflow.storedcomponents.service.PoolTaskService; +import org.dspace.xmlworkflow.storedcomponents.service.WorkflowItemRoleService; +import org.dspace.xmlworkflow.storedcomponents.service.XmlWorkflowItemService; import 
org.springframework.beans.factory.annotation.Autowired; /** - * Service implementation for the EPerson object. - * This class is responsible for all business logic calls for the EPerson object and is autowired by spring. + * Service implementation for the EPerson object. This class is responsible for + * all business logic calls for the EPerson object and is autowired by spring. * This class should never be accessed directly. * * @author kevinvandevelde at atmire.com @@ -60,7 +93,17 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl impleme @Autowired(required = true) protected ItemService itemService; @Autowired(required = true) + protected WorkflowItemRoleService workflowItemRoleService; + @Autowired(required = true) + CollectionRoleService collectionRoleService; + @Autowired(required = true) + protected GroupService groupService; + @Autowired(required = true) protected SubscribeService subscribeService; + @Autowired(required = true) + protected VersionDAO versionDAO; + @Autowired(required = true) + protected ClaimedTaskService claimedTaskService; protected EPersonServiceImpl() { super(); @@ -129,7 +172,7 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl impleme query = null; } return ePersonDAO.search(context, query, Arrays.asList(firstNameField, lastNameField), - Arrays.asList(firstNameField, lastNameField), offset, limit); + Arrays.asList(firstNameField, lastNameField), offset, limit); } } @@ -179,45 +222,202 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl impleme // authorized? if (!authorizeService.isAdmin(context)) { throw new AuthorizeException( - "You must be an admin to create an EPerson"); + "You must be an admin to create an EPerson"); } // Create a table row EPerson e = ePersonDAO.create(context, new EPerson()); log.info(LogManager.getHeader(context, "create_eperson", "eperson_id=" - + e.getID())); + + e.getID())); context.addEvent(new Event(Event.CREATE, Constants.EPERSON, e.getID(), - null, getIdentifiers(context, e))); + null, getIdentifiers(context, e))); return e; } @Override public void delete(Context context, EPerson ePerson) throws SQLException, AuthorizeException { + try { + delete(context, ePerson, true); + } catch (AuthorizeException ex) { + log.error("This AuthorizeException: " + ex + " occured while deleting Eperson with the ID: " + + ePerson.getID()); + throw new AuthorizeException(ex); + } catch (IOException ex) { + log.error("This IOException: " + ex + " occured while deleting Eperson with the ID: " + ePerson.getID()); + throw new AuthorizeException(ex); + } catch (EPersonDeletionException e) { + throw new IllegalStateException(e); + } + } + + /** + * Deletes an EPerson. The argument cascade defines whether all references + * on an EPerson should be deleted as well (by either deleting the + * referencing object - e.g. WorkspaceItem, ResourcePolicy - or by setting + * the foreign key null - e.g. archived Items). If cascade is set to false + * and the EPerson is referenced somewhere, this leads to an + * AuthorizeException. EPersons may be referenced by Items, ResourcePolicies + * and workflow tasks. + * + * @param context DSpace context + * @param ePerson The EPerson to delete. + * @param cascade Whether to delete references on the EPerson (cascade = + * true) or to abort the deletion (cascade = false) if the EPerson is + * referenced within DSpace. 
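+ * When cascade is true the referencing rows are cleaned up as part of the delete: workspace items are removed,
+ * the submitter of archived items is set to null, and resource policies as well as pooled or claimed workflow
+ * tasks that reference the EPerson are deleted.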
+ * + * @throws SQLException + * @throws AuthorizeException + * @throws IOException + */ + public void delete(Context context, EPerson ePerson, boolean cascade) + throws SQLException, AuthorizeException, IOException, EPersonDeletionException { // authorized? if (!authorizeService.isAdmin(context)) { throw new AuthorizeException( - "You must be an admin to delete an EPerson"); + "You must be an admin to delete an EPerson"); + } + Set workFlowGroups = getAllWorkFlowGroups(context, ePerson); + for (Group group: workFlowGroups) { + List ePeople = groupService.allMembers(context, group); + if (ePeople.size() == 1 && ePeople.contains(ePerson)) { + throw new IllegalStateException( + "Refused to delete user " + ePerson.getID() + " because it the only member of the workflow group" + + group.getID() + ". Delete the tasks and group first if you want to remove this user."); + } } - // check for presence of eperson in tables that // have constraints on eperson_id List constraintList = getDeleteConstraints(context, ePerson); - - // if eperson exists in tables that have constraints - // on eperson, throw an exception if (constraintList.size() > 0) { - throw new AuthorizeException(new EPersonDeletionException(constraintList)); - } + // Check if the constraints we found should be deleted + if (cascade) { + boolean isBasicFramework = WorkflowServiceFactory.getInstance().getWorkflowService() + instanceof BasicWorkflowService; + boolean isXmlFramework = WorkflowServiceFactory.getInstance().getWorkflowService() + instanceof XmlWorkflowService; + Iterator constraintsIterator = constraintList.iterator(); + while (constraintsIterator.hasNext()) { + String tableName = constraintsIterator.next(); + if (StringUtils.equals(tableName, "item") || StringUtils.equals(tableName, "workspaceitem")) { + Iterator itemIterator = itemService.findBySubmitter(context, ePerson, true); + + VersionHistoryService versionHistoryService = VersionServiceFactory.getInstance() + .getVersionHistoryService(); + VersioningService versioningService = VersionServiceFactory.getInstance().getVersionService(); + + while (itemIterator.hasNext()) { + Item item = itemIterator.next(); + + VersionHistory versionHistory = versionHistoryService.findByItem(context, item); + if (null != versionHistory) { + for (Version version : versioningService.getVersionsByHistory(context, + versionHistory)) { + version.setePerson(null); + versionDAO.save(context, version); + } + } + WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance() + .getWorkspaceItemService(); + WorkspaceItem wsi = workspaceItemService.findByItem(context, item); + + if (null != wsi) { + workspaceItemService.deleteAll(context, wsi); + } else { + // we can do that as dc.provenance still contains + // information about who submitted and who + // archived an item. 
+ item.setSubmitter(null); + itemService.update(context, item); + } + } + } else if (StringUtils.equals(tableName, "cwf_claimtask") && isXmlFramework) { + // Unclaim all XmlWorkflow tasks + XmlWorkflowItemService xmlWorkflowItemService = XmlWorkflowServiceFactory + .getInstance().getXmlWorkflowItemService(); + ClaimedTaskService claimedTaskService = XmlWorkflowServiceFactory + .getInstance().getClaimedTaskService(); + XmlWorkflowService xmlWorkflowService = XmlWorkflowServiceFactory + .getInstance().getXmlWorkflowService(); + WorkflowRequirementsService workflowRequirementsService = XmlWorkflowServiceFactory + .getInstance().getWorkflowRequirementsService(); + + List xmlWorkflowItems = xmlWorkflowItemService + .findBySubmitter(context, ePerson); + List claimedTasks = claimedTaskService.findByEperson(context, ePerson); + + for (ClaimedTask task : claimedTasks) { + xmlWorkflowService.deleteClaimedTask(context, task.getWorkflowItem(), task); + + try { + workflowRequirementsService.removeClaimedUser(context, task.getWorkflowItem(), + ePerson, task.getStepID()); + } catch (WorkflowConfigurationException ex) { + log.error("This WorkflowConfigurationException: " + ex + + " occured while deleting Eperson with the ID: " + ePerson.getID()); + throw new AuthorizeException(new EPersonDeletionException(Collections + .singletonList(tableName))); + } + } + } else if (StringUtils.equals(tableName, "workflowitem") && isBasicFramework) { + // Remove basicWorkflow workflowitem and unclaim them + BasicWorkflowItemService basicWorkflowItemService = BasicWorkflowServiceFactory.getInstance() + .getBasicWorkflowItemService(); + BasicWorkflowService basicWorkflowService = BasicWorkflowServiceFactory.getInstance() + .getBasicWorkflowService(); + TaskListItemService taskListItemService = BasicWorkflowServiceFactory.getInstance() + .getTaskListItemService(); + List workflowItems = basicWorkflowItemService.findByOwner(context, ePerson); + for (BasicWorkflowItem workflowItem : workflowItems) { + int state = workflowItem.getState(); + // unclaim tasks that are in the pool. + if (state == BasicWorkflowServiceImpl.WFSTATE_STEP1 + || state == BasicWorkflowServiceImpl.WFSTATE_STEP2 + || state == BasicWorkflowServiceImpl.WFSTATE_STEP3) { + log.info(LogManager.getHeader(context, "unclaim_workflow", + "workflow_id=" + workflowItem.getID() + ", claiming EPerson is deleted")); + basicWorkflowService.unclaim(context, workflowItem, context.getCurrentUser()); + // remove the EPerson from the list of persons that can (re-)claim the task + // while we are doing it below, we must do this here as well as the previously + // unclaimed tasks was put back into pool and we do not know the order the tables + // are checked. + taskListItemService.deleteByWorkflowItemAndEPerson(context, workflowItem, ePerson); + } + } + } else if (StringUtils.equals(tableName, "resourcepolicy")) { + // we delete the EPerson, it won't need any rights anymore. + authorizeService.removeAllEPersonPolicies(context, ePerson); + } else if (StringUtils.equals(tableName, "tasklistitem") && isBasicFramework) { + // remove EPerson from the list of EPersons that may claim some specific workflow tasks. 
+ TaskListItemService taskListItemService = BasicWorkflowServiceFactory.getInstance() + .getTaskListItemService(); + taskListItemService.deleteByEPerson(context, ePerson); + } else if (StringUtils.equals(tableName, "cwf_pooltask") && isXmlFramework) { + PoolTaskService poolTaskService = XmlWorkflowServiceFactory.getInstance().getPoolTaskService(); + poolTaskService.deleteByEperson(context, ePerson); + } else if (StringUtils.equals(tableName, "cwf_workflowitemrole") && isXmlFramework) { + WorkflowItemRoleService workflowItemRoleService = XmlWorkflowServiceFactory.getInstance() + .getWorkflowItemRoleService(); + workflowItemRoleService.deleteByEPerson(context, ePerson); + } else { + log.warn("EPerson is referenced in table '" + tableName + + "'. Deletion of EPerson " + ePerson.getID() + " may fail " + + "if the database does not handle this " + + "reference."); + } + } + } else { + throw new EPersonDeletionException(constraintList); + } + } context.addEvent(new Event(Event.DELETE, Constants.EPERSON, ePerson.getID(), ePerson.getEmail(), - getIdentifiers(context, ePerson))); + getIdentifiers(context, ePerson))); // XXX FIXME: This sidesteps the object model code so it won't // generate REMOVE events on the affected Groups. - // Remove any group memberships first // Remove any group memberships first Iterator groups = ePerson.getGroups().iterator(); @@ -234,7 +434,20 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl impleme ePersonDAO.delete(context, ePerson); log.info(LogManager.getHeader(context, "delete_eperson", - "eperson_id=" + ePerson.getID())); + "eperson_id=" + ePerson.getID())); + } + + private Set getAllWorkFlowGroups(Context context, EPerson ePerson) throws SQLException { + Set workFlowGroups = new HashSet<>(); + + Set groups = groupService.allMemberGroupsSet(context, ePerson); + for (Group group: groups) { + List collectionRoles = collectionRoleService.findByGroup(context, group); + if (!collectionRoles.isEmpty()) { + workFlowGroups.add(group); + } + } + return workFlowGroups; } @Override @@ -268,8 +481,8 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl impleme PasswordHash hash = null; try { hash = new PasswordHash(ePerson.getDigestAlgorithm(), - ePerson.getSalt(), - ePerson.getPassword()); + ePerson.getSalt(), + ePerson.getPassword()); } catch (DecoderException ex) { log.error("Problem decoding stored salt or hash: " + ex.getMessage()); } @@ -281,9 +494,9 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl impleme PasswordHash myHash; try { myHash = new PasswordHash( - ePerson.getDigestAlgorithm(), - ePerson.getSalt(), - ePerson.getPassword()); + ePerson.getDigestAlgorithm(), + ePerson.getSalt(), + ePerson.getPassword()); } catch (DecoderException ex) { log.error(ex.getMessage()); return false; @@ -312,8 +525,8 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl impleme // Check authorisation - if you're not the eperson // see if the authorization system says you can if (!context.ignoreAuthorization() - && ((context.getCurrentUser() == null) || (ePerson.getID() != context - .getCurrentUser().getID()))) { + && ((context.getCurrentUser() == null) || (ePerson.getID() != context + .getCurrentUser().getID()))) { authorizeService.authorizeAction(context, ePerson, Constants.WRITE); } @@ -322,11 +535,11 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl impleme ePersonDAO.save(context, ePerson); log.info(LogManager.getHeader(context, "update_eperson", - "eperson_id=" + ePerson.getID())); + "eperson_id=" + 
ePerson.getID())); if (ePerson.isModified()) { context.addEvent(new Event(Event.MODIFY, Constants.EPERSON, - ePerson.getID(), null, getIdentifiers(context, ePerson))); + ePerson.getID(), null, getIdentifiers(context, ePerson))); ePerson.clearModified(); } if (ePerson.isMetadataModified()) { @@ -339,11 +552,22 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl impleme List tableList = new ArrayList(); // check for eperson in item table - Iterator itemsBySubmitter = itemService.findBySubmitter(context, ePerson); + Iterator itemsBySubmitter = itemService.findBySubmitter(context, ePerson, true); if (itemsBySubmitter.hasNext()) { tableList.add("item"); } + WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); + List workspaceBySubmitter = workspaceItemService.findByEPerson(context, ePerson); + if (workspaceBySubmitter.size() > 0) { + tableList.add("workspaceitem"); + } + + ResourcePolicyService resourcePolicyService = AuthorizeServiceFactory.getInstance().getResourcePolicyService(); + if (resourcePolicyService.find(context, ePerson).size() > 0) { + tableList.add("resourcepolicy"); + } + WorkflowService workflowService = WorkflowServiceFactory.getInstance().getWorkflowService(); List workflowConstraints = workflowService.getEPersonDeleteConstraints(context, ePerson); tableList.addAll(workflowConstraints); diff --git a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java index 7c23216458..71fbcce7d3 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java @@ -23,7 +23,9 @@ import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Pair; import org.dspace.authorize.AuthorizeConfiguration; import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.AuthorizeService; +import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.content.Collection; import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObjectServiceImpl; @@ -40,8 +42,15 @@ import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.GroupService; import org.dspace.event.Event; import org.dspace.util.UUIDUtils; +import org.dspace.xmlworkflow.Role; +import org.dspace.xmlworkflow.factory.XmlWorkflowFactory; +import org.dspace.xmlworkflow.state.Step; +import org.dspace.xmlworkflow.storedcomponents.ClaimedTask; import org.dspace.xmlworkflow.storedcomponents.CollectionRole; +import org.dspace.xmlworkflow.storedcomponents.PoolTask; +import org.dspace.xmlworkflow.storedcomponents.service.ClaimedTaskService; import org.dspace.xmlworkflow.storedcomponents.service.CollectionRoleService; +import org.dspace.xmlworkflow.storedcomponents.service.PoolTaskService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -76,6 +85,15 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl implements @Autowired(required = true) protected AuthorizeService authorizeService; + @Autowired(required = true) + protected ResourcePolicyService resourcePolicyService; + + @Autowired(required = true) + protected PoolTaskService poolTaskService; + @Autowired(required = true) + protected ClaimedTaskService claimedTaskService; + @Autowired(required = true) + protected XmlWorkflowFactory workflowFactory; 
protected GroupServiceImpl() { super(); @@ -139,8 +157,48 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl implements groupChild.getName(), getIdentifiers(context, groupParent))); } + /** + * Removes a member of a group. + * The removal will be refused if the group is linked to a workflow step which has claimed tasks or pool tasks + * and no other member is present in the group to handle these. + * @param context DSpace context object + * @param group DSpace group + * @param ePerson eperson + * @throws SQLException + */ @Override - public void removeMember(Context context, Group group, EPerson ePerson) { + public void removeMember(Context context, Group group, EPerson ePerson) throws SQLException { + List collectionRoles = collectionRoleService.findByGroup(context, group); + if (!collectionRoles.isEmpty()) { + List poolTasks = poolTaskService.findByGroup(context, group); + List claimedTasks = claimedTaskService.findByEperson(context, ePerson); + for (ClaimedTask claimedTask : claimedTasks) { + Step stepByName = workflowFactory.getStepByName(claimedTask.getStepID()); + Role role = stepByName.getRole(); + for (CollectionRole collectionRole : collectionRoles) { + if (StringUtils.equals(collectionRole.getRoleId(), role.getId()) + && claimedTask.getWorkflowItem().getCollection() == collectionRole.getCollection()) { + List ePeople = allMembers(context, group); + if (ePeople.size() == 1 && ePeople.contains(ePerson)) { + throw new IllegalStateException( + "Refused to remove user " + ePerson + .getID() + " from workflow group because the group " + group + .getID() + " has tasks assigned and no other members"); + } + + } + } + } + if (!poolTasks.isEmpty()) { + List ePeople = allMembers(context, group); + if (ePeople.size() == 1 && ePeople.contains(ePerson)) { + throw new IllegalStateException( + "Refused to remove user " + ePerson + .getID() + " from workflow group because the group " + group + .getID() + " has tasks assigned and no other members"); + } + } + } if (group.remove(ePerson)) { context.addEvent(new Event(Event.REMOVE, Constants.GROUP, group.getID(), Constants.EPERSON, ePerson.getID(), ePerson.getEmail(), getIdentifiers(context, group))); @@ -149,6 +207,20 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl implements @Override public void removeMember(Context context, Group groupParent, Group childGroup) throws SQLException { + List collectionRoles = collectionRoleService.findByGroup(context, groupParent); + if (!collectionRoles.isEmpty()) { + List poolTasks = poolTaskService.findByGroup(context, groupParent); + if (!poolTasks.isEmpty()) { + List parentPeople = allMembers(context, groupParent); + List childPeople = allMembers(context, childGroup); + if (childPeople.containsAll(parentPeople)) { + throw new IllegalStateException( + "Refused to remove sub group " + childGroup + .getID() + " from workflow group because the group " + groupParent + .getID() + " has tasks assigned and no other members"); + } + } + } if (groupParent.remove(childGroup)) { childGroup.removeParentGroup(groupParent); context.addEvent( @@ -185,7 +257,8 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl implements return false; // special, everyone is member of group 0 (anonymous) - } else if (StringUtils.equals(group.getName(), Group.ANONYMOUS)) { + } else if (StringUtils.equals(group.getName(), Group.ANONYMOUS) || + isParentOf(context, group, findByName(context, Group.ANONYMOUS))) { return true; } else { @@ -654,6 +727,23 @@ public class GroupServiceImpl extends 
DSpaceObjectServiceImpl implements return collectionService.getParentObject(context, collection); } } + } else { + if (AuthorizeConfiguration.canCollectionAdminManagePolicies() + || AuthorizeConfiguration.canCommunityAdminManagePolicies() + || AuthorizeConfiguration.canCommunityAdminManageCollectionWorkflows()) { + List groups = new ArrayList(); + groups.add(group); + List policies = resourcePolicyService.find(context, null, groups, + Constants.DEFAULT_ITEM_READ, Constants.COLLECTION); + if (policies.size() > 0) { + return policies.get(0).getdSpaceObject(); + } + policies = resourcePolicyService.find(context, null, groups, + Constants.DEFAULT_BITSTREAM_READ, Constants.COLLECTION); + if (policies.size() > 0) { + return policies.get(0).getdSpaceObject(); + } + } } } if (AuthorizeConfiguration.canCommunityAdminManageAdminGroup()) { diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/AccountService.java b/dspace-api/src/main/java/org/dspace/eperson/service/AccountService.java index c8ecb0cc67..45fa6d26b1 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/service/AccountService.java +++ b/dspace-api/src/main/java/org/dspace/eperson/service/AccountService.java @@ -46,4 +46,11 @@ public interface AccountService { public void deleteToken(Context context, String token) throws SQLException; + + /** + * This method verifies that a certain String adheres to the password rules for DSpace + * @param password The String to be checked + * @return A boolean indicating whether or not the given String adheres to the password rules + */ + public boolean verifyPasswordStructure(String password); } diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java b/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java index f750419af1..b49ee857fb 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java +++ b/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java @@ -76,7 +76,7 @@ public interface GroupService extends DSpaceObjectService, DSpaceObjectLe * @param group DSpace group * @param ePerson eperson */ - public void removeMember(Context context, Group group, EPerson ePerson); + public void removeMember(Context context, Group group, EPerson ePerson) throws SQLException; /** diff --git a/dspace-api/src/main/java/org/dspace/external/provider/impl/LiveImportDataProvider.java b/dspace-api/src/main/java/org/dspace/external/provider/impl/LiveImportDataProvider.java new file mode 100644 index 0000000000..45855a74ad --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/external/provider/impl/LiveImportDataProvider.java @@ -0,0 +1,162 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.external.provider.impl; + +import java.util.Collection; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.content.dto.MetadataValueDTO; +import org.dspace.external.model.ExternalDataObject; +import org.dspace.external.provider.ExternalDataProvider; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.dspace.importer.external.service.components.QuerySource; + +/** + * This class allows to 
configure a Live Import Provider as an External Data Provider + * + * @author Andrea Bollini (andrea.bollini at 4science.it) + * + */ +public class LiveImportDataProvider implements ExternalDataProvider { + /** + * The {@link QuerySource} live import provider + */ + private QuerySource querySource; + + /** + * An unique human readable identifier for this provider + */ + private String sourceIdentifier; + + private String recordIdMetadata; + + private String displayMetadata = "dc.title"; + + @Override + public String getSourceIdentifier() { + return sourceIdentifier; + } + + /** + * This method set the SourceIdentifier for the ExternalDataProvider + * @param sourceIdentifier The UNIQUE sourceIdentifier to be set on any LiveImport data provider + */ + public void setSourceIdentifier(String sourceIdentifier) { + this.sourceIdentifier = sourceIdentifier; + } + + /** + * This method set the MetadataSource for the ExternalDataProvider + * @param metadataSource {@link org.dspace.importer.external.service.components.MetadataSource} implementation used to process the input data + */ + public void setMetadataSource(QuerySource querySource) { + this.querySource = querySource; + } + + /** + * This method set dublin core identifier to use as metadata id + * @param recordIdMetadata dublin core identifier to use as metadata id + */ + public void setRecordIdMetadata(String recordIdMetadata) { + this.recordIdMetadata = recordIdMetadata; + } + + /** + * This method set the dublin core identifier to display the title + * @param displayMetadata metadata to use as title + */ + public void setDisplayMetadata(String displayMetadata) { + this.displayMetadata = displayMetadata; + } + + @Override + public Optional getExternalDataObject(String id) { + try { + ExternalDataObject externalDataObject = getExternalDataObject(querySource.getRecord(id)); + return Optional.of(externalDataObject); + } catch (MetadataSourceException e) { + throw new RuntimeException( + "The live import provider " + querySource.getImportSource() + " throws an exception", e); + } + } + + @Override + public List searchExternalDataObjects(String query, int start, int limit) { + Collection records; + try { + records = querySource.getRecords(query, start, limit); + return records.stream().map(r -> getExternalDataObject(r)).collect(Collectors.toList()); + } catch (MetadataSourceException e) { + throw new RuntimeException( + "The live import provider " + querySource.getImportSource() + " throws an exception", e); + } + } + + @Override + public boolean supports(String source) { + return StringUtils.equalsIgnoreCase(sourceIdentifier, source); + } + + @Override + public int getNumberOfResults(String query) { + try { + return querySource.getRecordsCount(query); + } catch (MetadataSourceException e) { + throw new RuntimeException( + "The live import provider " + querySource.getImportSource() + " throws an exception", e); + } + } + + /** + * Internal method to convert an ImportRecord to an ExternalDataObject + * + * FIXME it would be useful to remove ImportRecord at all in favor of the + * ExternalDataObject + * + * @param record + * @return + */ + private ExternalDataObject getExternalDataObject(ImportRecord record) { + //return 400 if no record were found + if (record == null) { + throw new IllegalArgumentException("No record found for query or id"); + } + ExternalDataObject externalDataObject = new ExternalDataObject(sourceIdentifier); + String id = getFirstValue(record, recordIdMetadata); + String display = getFirstValue(record, displayMetadata); + 
externalDataObject.setId(id); + externalDataObject.setDisplayValue(display); + externalDataObject.setValue(display); + for (MetadatumDTO dto : record.getValueList()) { + // FIXME it would be useful to remove MetadatumDTO in favor of MetadataValueDTO + MetadataValueDTO mvDTO = new MetadataValueDTO(); + mvDTO.setSchema(dto.getSchema()); + mvDTO.setElement(dto.getElement()); + mvDTO.setQualifier(dto.getQualifier()); + mvDTO.setValue(dto.getValue()); + externalDataObject.addMetadata(mvDTO); + } + return externalDataObject; + } + + private String getFirstValue(ImportRecord record, String metadata) { + String id = null; + String[] split = StringUtils.split(metadata, ".", 3); + Collection values = record.getValue(split[0], split[1], split.length == 3 ? split[2] : null); + if (!values.isEmpty()) { + id = (values.iterator().next().getValue()); + } + return id; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/harvest/HarvestScheduler.java b/dspace-api/src/main/java/org/dspace/harvest/HarvestScheduler.java index d668b09bc4..5d0545845c 100644 --- a/dspace-api/src/main/java/org/dspace/harvest/HarvestScheduler.java +++ b/dspace-api/src/main/java/org/dspace/harvest/HarvestScheduler.java @@ -134,11 +134,13 @@ public class HarvestScheduler implements Runnable { if (maxActiveThreads == 0) { maxActiveThreads = 3; } - minHeartbeat = ConfigurationManager.getIntProperty("oai", "harvester.minHeartbeat") * 1000; + minHeartbeat = ConfigurationManager.getIntProperty("oai", "harvester.minHeartbeat"); + minHeartbeat = minHeartbeat * 1000; // multiple by 1000 to turn seconds to ms if (minHeartbeat == 0) { minHeartbeat = 30000; } - maxHeartbeat = ConfigurationManager.getIntProperty("oai", "harvester.maxHeartbeat") * 1000; + maxHeartbeat = ConfigurationManager.getIntProperty("oai", "harvester.maxHeartbeat"); + maxHeartbeat = maxHeartbeat * 1000; // multiple by 1000 to turn seconds to ms if (maxHeartbeat == 0) { maxHeartbeat = 3600000; } diff --git a/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java index 46bc317d13..9db4402007 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java @@ -761,9 +761,9 @@ public class DOIIdentifierProvider Item item = (Item) dso; List metadata = itemService.getMetadata(item, MD_SCHEMA, DOI_ELEMENT, DOI_QUALIFIER, null); + String leftPart = DOI.RESOLVER + SLASH + getPrefix() + SLASH + getNamespaceSeparator(); for (MetadataValue id : metadata) { - if (id.getValue().startsWith( - DOI.RESOLVER + String.valueOf(SLASH) + PREFIX + String.valueOf(SLASH) + NAMESPACE_SEPARATOR)) { + if (id.getValue().startsWith(leftPart)) { return doiService.DOIFromExternalFormat(id.getValue()); } } diff --git a/dspace-api/src/main/java/org/dspace/importer/external/arxiv/metadatamapping/ArXivFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/arxiv/metadatamapping/ArXivFieldMapping.java new file mode 100644 index 0000000000..272b149015 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/arxiv/metadatamapping/ArXivFieldMapping.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.arxiv.metadatamapping; + +import java.util.Map; 
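/*
 * Editorial sketch (not part of this changeset), referring back to LiveImportDataProvider above:
 * a provider instance is configured through its setters. The metadata field names and the
 * "arXivQuerySource" variable below are illustrative assumptions only, not values defined by this PR.
 *
 *   LiveImportDataProvider provider = new LiveImportDataProvider();
 *   provider.setSourceIdentifier("arxiv");                // must be unique across providers
 *   provider.setMetadataSource(arXivQuerySource);         // any QuerySource implementation
 *   provider.setRecordIdMetadata("dc.identifier.other");  // metadata field holding the record id
 *   provider.setDisplayMetadata("dc.title");              // metadata field shown as the display value
 */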
+import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the ArXiv metadatum fields on the DSpace metadatum fields + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +public class ArXivFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. + */ + @Override + @Resource(name = "arxivMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/arxiv/metadatamapping/contributor/ArXivIdMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/arxiv/metadatamapping/contributor/ArXivIdMetadataContributor.java new file mode 100644 index 0000000000..ed5ac5960b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/arxiv/metadatamapping/contributor/ArXivIdMetadataContributor.java @@ -0,0 +1,60 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.arxiv.metadatamapping.contributor; + +import java.util.Collection; + +import org.apache.axiom.om.OMElement; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor; +import org.dspace.importer.external.metadatamapping.contributor.SimpleXpathMetadatumContributor; + +/** + * Arxiv specific implementation of {@link MetadataContributor} + * Responsible for generating the ArXiv Id from the retrieved item. + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + * + */ +public class ArXivIdMetadataContributor extends SimpleXpathMetadatumContributor { + + /** + * Retrieve the metadata associated with the given object. + * Depending on the retrieved node (using the query), different types of values will be added to the MetadatumDTO + * list + * + * @param t A class to retrieve metadata from. + * @return a collection of import records. Only the identifier of the found records may be put in the record. + */ + @Override + public Collection contributeMetadata(OMElement t) { + Collection values = super.contributeMetadata(t); + parseValue(values); + return values; + } + + /** + * ArXiv returns a full URL as in the value, e.g. http://arxiv.org/abs/1911.11405v1. + * This method parses out the identifier from the end of the URL, e.g. 1911.11405v1. 
+ * + * @param dtos Metadata which contains the items uri + */ + private void parseValue(Collection dtos) { + if (dtos != null) { + for (MetadatumDTO dto : dtos) { + if (dto != null && dto.getValue() != null && dto.getValue().contains("/")) { + int startIndex = dto.getValue().lastIndexOf('/') + 1; + int endIndex = dto.getValue().length(); + String id = dto.getValue().substring(startIndex, endIndex); + dto.setValue(id); + } + } + } + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/arxiv/service/ArXivImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/arxiv/service/ArXivImportMetadataSourceServiceImpl.java new file mode 100644 index 0000000000..6b418423fa --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/arxiv/service/ArXivImportMetadataSourceServiceImpl.java @@ -0,0 +1,421 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.arxiv.service; + +import java.io.StringReader; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.Callable; +import javax.el.MethodNotFoundException; +import javax.ws.rs.client.Client; +import javax.ws.rs.client.ClientBuilder; +import javax.ws.rs.client.Invocation; +import javax.ws.rs.client.WebTarget; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; + +import org.apache.axiom.om.OMElement; +import org.apache.axiom.om.OMXMLBuilderFactory; +import org.apache.axiom.om.OMXMLParserWrapper; +import org.apache.axiom.om.xpath.AXIOMXPath; +import org.apache.commons.lang3.StringUtils; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.components.QuerySource; +import org.jaxen.JaxenException; + +/** + * Implements a data source for querying ArXiv + * + * @author Pasquale Cavallo (pasquale.cavallo at 4Science dot it) + * + */ +public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private WebTarget webTarget; + private String baseAddress; + + /** + * Find the number of records matching the query string in ArXiv. Supports pagination. + * + * @param query a query string to base the search on. + * @param start offset to start at + * @param count number of records to retrieve. + * @return a set of records. Fully transformed. + * @throws MetadataSourceException if the underlying methods throw any exception. + */ + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query, count, start)); + } + + /** + * Find records based on a object query and convert them to a list metadata mapped in ImportRecord. + * The entry with the key "query" of the Query's map will be used as query string value. + * + * @see org.dspace.importer.external.datamodel.Query + * @see org.dspace.importer.external.datamodel.ImportRecord + * @param query a query object to base the search on. + * @return a set of records. Fully transformed. 
+ * @throws MetadataSourceException if the underlying methods throw any exception. + */ + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query)); + } + + /** + * Find the number of records matching the query string in ArXiv; + * + * @param query a query object to base the search on. + * @return the sum of the matching records over this import source + * @throws MetadataSourceException if the underlying methods throw any exception. + */ + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + + /** + * Find the number of records matching a query; + * The entry with the key "query" of the Query's map will be used to get the query string. + * + * @see org.dspace.importer.external.datamodel.Query + * @param query a query string to base the search on. + * @return the sum of the matching records over this import source + * @throws MetadataSourceException if the underlying methods throw any exception. + */ + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + /** + * Get a single record of metadata from the arxiv by ArXiv ID. + * + * @param id id of the record in ArXiv + * @return the first matching record + * @throws MetadataSourceException if the underlying methods throw any exception. + */ + + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + List records = retry(new SearchByIdCallable(id)); + return records == null || records.isEmpty() ? null : records.get(0); + } + + /** + * Get a single record from the ArXiv matching the query. + * Field "query" will be used to get data from. + * + * @see org.dspace.importer.external.datamodel.Query + * @param query a query matching a single record + * @return the first matching record + * @throws MetadataSourceException if the underlying methods throw any exception. + */ + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + List records = retry(new SearchByIdCallable(query)); + return records == null || records.isEmpty() ? null : records.get(0); + } + + /** + * Initialize the class + * + * @throws Exception on generic exception + */ + @Override + public void init() throws Exception { + Client client = ClientBuilder.newClient(); + webTarget = client.target(baseAddress); + } + + /** + * The string that identifies this import implementation. Preferable a URI + * + * @return the identifying uri + */ + @Override + public String getImportSource() { + return "arxiv"; + } + + /** + * Expect this method will be not used and erased from the interface soon + */ + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + // FIXME: we need this method? + throw new MethodNotFoundException("This method is not implemented for ArXiv"); + } + + /** + * Finds records based on query object. + * Supports search by title and/or author + * + * @param query a query object to base the search on. + * @return a collection of import records. + * @throws MetadataSourceException if the underlying methods throw any exception. + */ + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + return retry(new FindMatchingRecordCallable(query)); + } + + /** + * This class is a Callable implementation to count the number of entries for an ArXiv + * query. 
+ * This Callable use as query value to ArXiv the string queryString passed to constructor. + * If the object will be construct through Query.class instance, the value of the Query's + * map with the key "query" will be used. + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + * + */ + private class CountByQueryCallable implements Callable { + private Query query; + + + private CountByQueryCallable(String queryString) { + query = new Query(); + query.addParameter("query", queryString); + } + + private CountByQueryCallable(Query query) { + this.query = query; + } + + + @Override + public Integer call() throws Exception { + String queryString = query.getParameterAsClass("query", String.class); + Integer start = query.getParameterAsClass("start", Integer.class); + Integer maxResult = query.getParameterAsClass("count", Integer.class); + WebTarget local = webTarget.queryParam("search_query", queryString); + if (maxResult != null) { + local = local.queryParam("max_results", String.valueOf(maxResult)); + } + if (start != null) { + local = local.queryParam("start", String.valueOf(start)); + } + Invocation.Builder invocationBuilder = local.request(MediaType.TEXT_PLAIN_TYPE); + Response response = invocationBuilder.get(); + if (response.getStatus() == 200) { + String responseString = response.readEntity(String.class); + OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(responseString)); + OMElement element = records.getDocumentElement(); + AXIOMXPath xpath = null; + try { + xpath = new AXIOMXPath("opensearch:totalResults"); + xpath.addNamespace("opensearch", "http://a9.com/-/spec/opensearch/1.1/"); + OMElement count = (OMElement) xpath.selectSingleNode(element); + return Integer.parseInt(count.getText()); + } catch (JaxenException e) { + return null; + } + } else { + return null; + } + } + } + + /** + * This class is a Callable implementation to get ArXiv entries based on + * query object. + * This Callable use as query value the string queryString passed to constructor. + * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". 
+ * + * @see org.dspace.importer.external.datamodel.Query + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + * + */ + private class SearchByQueryCallable implements Callable> { + private Query query; + + + private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("start", start); + query.addParameter("count", maxResult); + } + + private SearchByQueryCallable(Query query) { + this.query = query; + } + + + @Override + public List call() throws Exception { + List results = new ArrayList(); + String queryString = query.getParameterAsClass("query", String.class); + Integer start = query.getParameterAsClass("start", Integer.class); + Integer maxResult = query.getParameterAsClass("count", Integer.class); + WebTarget local = webTarget.queryParam("search_query", queryString); + if (maxResult != null) { + local = local.queryParam("max_results", String.valueOf(maxResult)); + } + if (start != null) { + local = local.queryParam("start", String.valueOf(start)); + } + Invocation.Builder invocationBuilder = local.request(MediaType.TEXT_PLAIN_TYPE); + Response response = invocationBuilder.get(); + if (response.getStatus() == 200) { + String responseString = response.readEntity(String.class); + List omElements = splitToRecords(responseString); + for (OMElement record : omElements) { + results.add(transformSourceRecords(record)); + } + return results; + } else { + return null; + } + } + } + + /** + * This class is a Callable implementation to get an ArXiv entry using ArXiv ID + * The ID to use can be passed through the constructor as a String or as Query's map entry, with the key "id". + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + * + */ + private class SearchByIdCallable implements Callable> { + private Query query; + + private SearchByIdCallable(Query query) { + this.query = query; + } + + private SearchByIdCallable(String id) { + this.query = new Query(); + query.addParameter("id", id); + } + + @Override + public List call() throws Exception { + List results = new ArrayList(); + String arxivid = query.getParameterAsClass("id", String.class); + if (StringUtils.isNotBlank(arxivid)) { + arxivid = arxivid.trim(); + if (arxivid.startsWith("http://arxiv.org/abs/")) { + arxivid = arxivid.substring("http://arxiv.org/abs/".length()); + } else if (arxivid.toLowerCase().startsWith("arxiv:")) { + arxivid = arxivid.substring("arxiv:".length()); + } + } + WebTarget local = webTarget.queryParam("id_list", arxivid); + Invocation.Builder invocationBuilder = local.request(MediaType.TEXT_PLAIN_TYPE); + Response response = invocationBuilder.get(); + if (response.getStatus() == 200) { + String responseString = response.readEntity(String.class); + List omElements = splitToRecords(responseString); + for (OMElement record : omElements) { + results.add(transformSourceRecords(record)); + } + return results; + } else { + return null; + } + } + } + + /** + * This class is a Callable implementation to search ArXiv entries + * using author and title. + * There are two field in the Query map to pass, with keys "title" and "author" + * (at least one must be used). 
+ * + * @see org.dspace.importer.external.datamodel.Query + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + * + */ + private class FindMatchingRecordCallable implements Callable> { + + private Query query; + + private FindMatchingRecordCallable(Query q) { + query = q; + } + + @Override + public List call() throws Exception { + String queryString = getQuery(this.query); + List results = new ArrayList(); + WebTarget local = webTarget.queryParam("search_query", queryString); + Invocation.Builder invocationBuilder = local.request(MediaType.TEXT_PLAIN_TYPE); + Response response = invocationBuilder.get(); + if (response.getStatus() == 200) { + String responseString = response.readEntity(String.class); + List omElements = splitToRecords(responseString); + for (OMElement record : omElements) { + results.add(transformSourceRecords(record)); + } + return results; + } else { + return null; + } + } + + private String getQuery(Query query) { + String title = query.getParameterAsClass("title", String.class); + String author = query.getParameterAsClass("author", String.class); + StringBuffer queryString = new StringBuffer(); + if (StringUtils.isNotBlank(title)) { + queryString.append("ti:\"").append(title).append("\""); + } + if (StringUtils.isNotBlank(author)) { + // [FAU] + if (queryString.length() > 0) { + queryString.append(" AND "); + } + queryString.append("au:\"").append(author).append("\""); + } + return queryString.toString(); + } + } + + private List splitToRecords(String recordsSrc) { + OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(recordsSrc)); + OMElement element = records.getDocumentElement(); + AXIOMXPath xpath = null; + try { + xpath = new AXIOMXPath("ns:entry"); + xpath.addNamespace("ns", "http://www.w3.org/2005/Atom"); + List recordsList = xpath.selectNodes(element); + return recordsList; + } catch (JaxenException e) { + return null; + } + } + + /** + * Return the baseAddress set to this object + * + * @return The String object that represents the baseAddress of this object + */ + public String getBaseAddress() { + return baseAddress; + } + + /** + * Set the baseAddress to this object + * + * @param baseAddress The String object that represents the baseAddress of this object + */ + public void setBaseAddress(String baseAddress) { + this.baseAddress = baseAddress; + } +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java new file mode 100644 index 0000000000..7468d601f5 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java @@ -0,0 +1,107 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.importer.external.bibtex.service; + +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import javax.annotation.Resource; + +import org.dspace.importer.external.exception.FileSourceException; +import org.dspace.importer.external.service.components.AbstractPlainMetadataSource; +import 
org.dspace.importer.external.service.components.dto.PlainMetadataKeyValueItem; +import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto; +import org.jbibtex.BibTeXDatabase; +import org.jbibtex.BibTeXEntry; +import org.jbibtex.BibTeXParser; +import org.jbibtex.Key; +import org.jbibtex.ParseException; +import org.jbibtex.Value; + +/** + * Implements a metadata importer for BibTeX files + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +public class BibtexImportMetadataSourceServiceImpl extends AbstractPlainMetadataSource { + + + /** + * The string that identifies this import implementation as + * MetadataSource implementation + * + * @return the identifying uri + */ + @Override + public String getImportSource() { + return "BibTeXMetadataSource"; + } + + @Override + protected List readData (InputStream + inputStream) throws FileSourceException { + List list = new ArrayList<>(); + BibTeXDatabase database; + try { + database = parseBibTex(inputStream); + } catch (IOException | ParseException e) { + throw new FileSourceException("Unable to parse file with BibTeX parser"); + } + if (database == null || database.getEntries() == null) { + throw new FileSourceException("File results in an empty list of metadata"); + } + if (database.getEntries() != null) { + for (Entry entry : database.getEntries().entrySet()) { + PlainMetadataSourceDto item = new PlainMetadataSourceDto(); + List keyValues = new ArrayList<>(); + item.setMetadata(keyValues); + PlainMetadataKeyValueItem keyValueItem = new PlainMetadataKeyValueItem(); + keyValueItem.setKey(entry.getValue().getType().getValue()); + keyValueItem.setValue(entry.getKey().getValue()); + keyValues.add(keyValueItem); + if (entry.getValue().getFields() != null) { + for (Entry subentry : entry.getValue().getFields().entrySet()) { + PlainMetadataKeyValueItem innerItem = new PlainMetadataKeyValueItem(); + innerItem.setKey(subentry.getKey().getValue()); + innerItem.setValue(subentry.getValue().toUserString()); + keyValues.add(innerItem); + } + } + list.add(item); + } + } + return list; + } + + private BibTeXDatabase parseBibTex(InputStream inputStream) throws IOException, ParseException { + Reader reader = new InputStreamReader(inputStream); + BibTeXParser bibtexParser = new BibTeXParser(); + return bibtexParser.parse(reader); + } + + + /** + * Retrieve the MetadataFieldMapping containing the mapping between RecordType + * (in this case PlainMetadataSourceDto.class) and Metadata + * + * @return The configured MetadataFieldMapping + */ + @Override + @SuppressWarnings("unchecked") + @Resource(name = "bibtexMetadataFieldMap") + public void setMetadataFieldMap(@SuppressWarnings("rawtypes") Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/csv/service/CharacterSeparatedImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/csv/service/CharacterSeparatedImportMetadataSourceServiceImpl.java new file mode 100644 index 0000000000..31ee1e5e5a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/csv/service/CharacterSeparatedImportMetadataSourceServiceImpl.java @@ -0,0 +1,154 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.csv.service; + +import 
java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import au.com.bytecode.opencsv.CSVReader; +import org.dspace.importer.external.exception.FileSourceException; +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor; +import org.dspace.importer.external.service.components.AbstractPlainMetadataSource; +import org.dspace.importer.external.service.components.MetadataSource; +import org.dspace.importer.external.service.components.dto.PlainMetadataKeyValueItem; +import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto; + + +/** + * This class is an implementation of {@link MetadataSource} which extends {@link AbstractPlainMetadataSource} + * in order to parse "character separated" files like csv, tsv, etc using the Live Import framework. + * + * @author Pasquale Cavallo + * + */ +public class CharacterSeparatedImportMetadataSourceServiceImpl extends AbstractPlainMetadataSource { + + private char separator = ','; + + private char escapeCharacter = '"'; + + private Integer skipLines = 1; + + private String importSource = "CsvMetadataSource"; + + /** + * Set the number of lines to skip at the start of the file. This method is suitable, + * for example, to skip file headers. + * + * @param skipLines number of the line at the start of the file to skip. + */ + public void setSkipLines(Integer skipLines) { + this.skipLines = skipLines; + } + + /** + * + * @return the number of the lines to skip + */ + public Integer getSkipLines() { + return skipLines; + } + + /** + * Method to inject the separator + * This must be the ASCII integer + * related to the char. + * In example, 9 for tab, 44 for comma + */ + public void setSeparator(char separator) { + this.separator = separator; + } + + @Override + public String getImportSource() { + return importSource; + } + + /** + * Method to set the name of the source + */ + public void setImportSource(String importSource) { + this.importSource = importSource; + } + + /** + * Method to inject the escape character. This must be the ASCII integer + * related to the char. + * In example, 9 for tab, 44 for comma + * + */ + public void setEscapeCharacter(char escapeCharacter) { + this.escapeCharacter = escapeCharacter; + } + + /** + * The method process any kind of "character separated" files, like CSV, TSV, and so on. + * It return a List of PlainMetadataSourceDto. + * Using the superclass methods AbstractPlainMetadataSource.getRecord(s), any of this + * element will then be converted in an {@link org.dspace.importer.external.datamodel.ImportRecord}. + + * Columns will be identified by their position, zero based notation. + * Separator character and escape character MUST be defined at class level. Number of lines to skip (headers) + * could also be defined in the field skipLines. 
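     * A hedged usage sketch (illustrative, not part of this changeset): for a tab-separated file with
     * a single header row one might configure a new instance with setSeparator('\t'),
     * setEscapeCharacter('"') and setSkipLines(1) before handing the file's InputStream to the
     * record-producing methods inherited from AbstractPlainMetadataSource.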
+ * + * @param InputStream The inputStream of the file + * @return A list of PlainMetadataSourceDto + * @throws FileSourceException if, for any reason, the file is not parsable + + */ + @Override + protected List readData(InputStream inputStream) throws FileSourceException { + List plainMetadataList = new ArrayList<>(); + try (CSVReader csvReader = new CSVReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8), + separator, escapeCharacter);) { + // read all row + List lines = csvReader.readAll(); + int listSize = lines == null ? 0 : lines.size(); + int count = skipLines; + // iterate over row (skipping the first skipLines) + while (count < listSize) { + String [] items = lines.get(count); + List keyValueList = new ArrayList<>(); + if (items != null) { + int size = items.length; + int index = 0; + //iterate over column in the selected row + while (index < size) { + //create key/value item for the specifics row/column + PlainMetadataKeyValueItem keyValueItem = new PlainMetadataKeyValueItem(); + keyValueItem.setKey(String.valueOf(index)); + keyValueItem.setValue(items[index]); + keyValueList.add(keyValueItem); + index++; + } + //save all column key/value for the given row + PlainMetadataSourceDto dto = new PlainMetadataSourceDto(); + dto.setMetadata(keyValueList); + plainMetadataList.add(dto); + } + count++; + } + } catch (IOException e) { + throw new FileSourceException("Error reading file", e); + } + return plainMetadataList; + } + + @Override + public void setMetadataFieldMap(Map> metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/datamodel/Query.java b/dspace-api/src/main/java/org/dspace/importer/external/datamodel/Query.java index 8c5e1b394a..8f392bdb52 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/datamodel/Query.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/datamodel/Query.java @@ -71,7 +71,7 @@ public class Query { return null; } else { Object o = c.iterator().next(); - if (clazz.isAssignableFrom(o.getClass())) { + if (o != null && clazz.isAssignableFrom(o.getClass())) { return (T) o; } else { return null; diff --git a/dspace-api/src/main/java/org/dspace/importer/external/endnote/service/EndnoteImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/endnote/service/EndnoteImportMetadataSourceServiceImpl.java new file mode 100644 index 0000000000..9881832369 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/endnote/service/EndnoteImportMetadataSourceServiceImpl.java @@ -0,0 +1,140 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.endnote.service; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.dspace.importer.external.exception.FileSourceException; +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor; +import org.dspace.importer.external.service.components.AbstractPlainMetadataSource; +import 
org.dspace.importer.external.service.components.dto.PlainMetadataKeyValueItem; +import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto; + +/** + * Implements a metadata importer for Endnote files + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +public class EndnoteImportMetadataSourceServiceImpl extends AbstractPlainMetadataSource { + + @Override + public String getImportSource() { + return "EndnoteMetadataSource"; + } + + /** + * This method map the data present in the inputStream, then return a list PlainMetadataSourceDto. + * Any PlainMetadataSourceDto will be used to create a single {@link org.dspace.importer.external.datamodel.ImportRecord} + * + * @param inputStream the inputStream of the Endnote file + * @return List of {@link org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto} + * @throws FileSourceException + * @see org.dspace.importer.external.service.components.AbstractPlainMetadataSource + */ + @Override + protected List readData(InputStream fileInpuStream) throws FileSourceException { + List list = new ArrayList<>(); + try { + // row start from 3, because the first 2 (FN and VR) will be removed by tokenize + int lineForDebug = 3; + List tokenized = tokenize(fileInpuStream); + List tmpList = new ArrayList<>(); + // iterate over key/value pairs, create a new PlainMetadataSourceDto on "ER" rows (which means "new record) + // and stop on EF (end of file). + for (PlainMetadataKeyValueItem item : tokenized) { + if (item.getKey() == null || item.getKey().isEmpty()) { + throw new FileSourceException("Null or empty key expected on line " + + lineForDebug + ". Keys cannot be null nor empty"); + } + if ("EF".equals(item.getKey())) { + // end of file + break; + } + if ("ER".equals(item.getKey())) { + // new ImportRecord start from here (ER is a content delimiter) + // save the previous, then create a new one + PlainMetadataSourceDto dto = new PlainMetadataSourceDto(); + dto.setMetadata(new ArrayList<>(tmpList)); + list.add(dto); + tmpList = new ArrayList<>(); + } else { + if (item.getValue() == null || item.getValue().isEmpty()) { + throw new FileSourceException("Null or empty value expected on line " + + lineForDebug + ". Value expected"); + } + tmpList.add(item); + } + lineForDebug++; + } + } catch (Exception e) { + throw new FileSourceException("Error reading file", e); + } + return list; + } + + + /** + * This method iterate over file rows, split content in a list of key/value items through RexExp + * and save the content sequentially. + * Key "FN" and "VR", which is a preamble in Endnote, will be checked but not saved. 
+ * + * @param fileInpuStream the inputStream of the Endnote file + * @return A list of key/value items which map the file's row sequentially + * @throws IOException + * @throws FileSourceException + */ + private List tokenize(InputStream fileInpuStream) + throws IOException, FileSourceException { + BufferedReader reader = new BufferedReader(new InputStreamReader(fileInpuStream)); + String line; + line = reader.readLine(); + // FN and VR works as preamble, just check and skip them + if (line == null || !line.startsWith("FN")) { + throw new FileSourceException("Invalid endNote file"); + } + line = reader.readLine(); + if (line == null || !line.startsWith("VR")) { + throw new FileSourceException("Invalid endNote file"); + } + // split any row into first part ^[A-Z]{2} used as key (the meaning of the data) + // and second part ?(.*) used as value (the data) + Pattern pattern = Pattern.compile("(^[A-Z]{2}) ?(.*)$"); + List list = new ArrayList(); + while ((line = reader.readLine()) != null) { + line = line.trim(); + // skip empty lines + if (line.isEmpty() || line.equals("")) { + continue; + } + Matcher matcher = pattern.matcher(line); + if (matcher.matches()) { + PlainMetadataKeyValueItem item = new PlainMetadataKeyValueItem(); + item.setKey(matcher.group(1)); + item.setValue(matcher.group(2)); + list.add(item); + } + } + return list; + } + + @Override + public void setMetadataFieldMap(Map> metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/exception/FileMultipleOccurencesException.java b/dspace-api/src/main/java/org/dspace/importer/external/exception/FileMultipleOccurencesException.java new file mode 100644 index 0000000000..d09889a7ff --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/exception/FileMultipleOccurencesException.java @@ -0,0 +1,29 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.importer.external.exception; + +/** + * This exception could be throws when more than one element is found + * in a method that works on one only. + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ + +public class FileMultipleOccurencesException extends Exception { + + private static final long serialVersionUID = 1222409723339501937L; + + public FileMultipleOccurencesException(String message, Throwable cause) { + super(message, cause); + } + + public FileMultipleOccurencesException(String message) { + super(message); + } +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/exception/FileSourceException.java b/dspace-api/src/main/java/org/dspace/importer/external/exception/FileSourceException.java new file mode 100644 index 0000000000..c41ce94151 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/exception/FileSourceException.java @@ -0,0 +1,28 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.importer.external.exception; + +/** + * Represents a problem with the File content: e.g. null input stream, invalid content, ... 
+ * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ + +public class FileSourceException extends Exception { + + private static final long serialVersionUID = 6895579588455260182L; + + public FileSourceException(String message, Throwable cause) { + super(message, cause); + } + + public FileSourceException(String message) { + super(message); + } +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/AbstractMetadataFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/AbstractMetadataFieldMapping.java index 3ce45d6048..aed2f0e084 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/AbstractMetadataFieldMapping.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/AbstractMetadataFieldMapping.java @@ -117,16 +117,13 @@ public abstract class AbstractMetadataFieldMapping public Collection resultToDCValueMapping(RecordType record) { List values = new LinkedList(); - for (MetadataContributor query : getMetadataFieldMap().values()) { try { values.addAll(query.contributeMetadata(record)); } catch (Exception e) { log.error("Error", e); } - } return values; - } } diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/EnhancedSimpleMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/EnhancedSimpleMetadataContributor.java new file mode 100644 index 0000000000..b06322ac2c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/EnhancedSimpleMetadataContributor.java @@ -0,0 +1,108 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.io.IOException; +import java.io.StringReader; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; + +import au.com.bytecode.opencsv.CSVReader; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.dspace.importer.external.service.components.dto.PlainMetadataKeyValueItem; +import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto; + + +/** + * This class implements functionalities to handle common situation regarding plain metadata. + * In some scenario, like csv or tsv, the format don't allow lists. + * We can use this MetadataContribut to parse a given plain metadata and split it into + * related list, based on the delimiter. No escape character is present. + * Default values are comma (,) for delimiter, and double quote (") for escape character + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + * + */ +public class EnhancedSimpleMetadataContributor extends SimpleMetadataContributor { + + private char delimiter = ','; + + private char escape = '"'; + + /** + * This method could be used to set the delimiter used during parse + * If no delimiter is set, comma will be used + */ + public void setDelimiter(char delimiter) { + this.delimiter = delimiter; + } + + /** + * This method could be used to get the delimiter used in this class + */ + public char getDelimiter() { + return delimiter; + } + + /** + * Method to inject the escape character. + * This must be the ASCII integer + * related to the char. 
+ * In example, 9 for tab, 44 for comma + * If no escape is set, double quote will be used + */ + public void setEscape(char escape) { + this.escape = escape; + } + + /** + * Method to get the escape character. + * + */ + public char getEscape() { + return escape; + } + + @Override + public Collection contributeMetadata(PlainMetadataSourceDto t) { + Collection values = null; + values = new LinkedList<>(); + for (PlainMetadataKeyValueItem metadatum : t.getMetadata()) { + if (getKey().equals(metadatum.getKey())) { + String[] splitted = splitToRecord(metadatum.getValue()); + for (String value : splitted) { + MetadatumDTO dcValue = new MetadatumDTO(); + dcValue.setValue(value); + dcValue.setElement(getField().getElement()); + dcValue.setQualifier(getField().getQualifier()); + dcValue.setSchema(getField().getSchema()); + values.add(dcValue); + } + } + } + return values; + } + + private String[] splitToRecord(String value) { + List rows; + // For example, list of author must be: Author 1, author 2, author 3 + // if author name contains comma, is important to escape its in + // this way: Author 1, \"Author 2, something\", Author 3 + try (CSVReader csvReader = new CSVReader(new StringReader(value), + delimiter, escape);) { + rows = csvReader.readAll(); + } catch (IOException e) { + //fallback, use the inpu as value + return new String[] { value }; + } + //must be one row + return rows.get(0); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/MultipleMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/MultipleMetadataContributor.java new file mode 100644 index 0000000000..2685948fd9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/MultipleMetadataContributor.java @@ -0,0 +1,139 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; + +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; + +/** + * This Contributor is helpful to avoid the limit of the Live Import Framework. + * In Live Import, one dc schema/element/qualifier could be associate with one and + * only one MetadataContributor, because the map they're saved in use dc entity as key. + * + * In fact, in this implementation we use the MetadataFieldConfig present in this MultipleMetadataContributor + * contributor, but the data (values of the dc metadatum) will be loaded using any of the contributor defined + * in the List metadatumContributors, by iterating over them. 
+ * + * @see org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + * + */ +public class MultipleMetadataContributor implements MetadataContributor { + + private MetadataFieldConfig field; + + private List metadatumContributors; + + /** + * Empty constructor + */ + public MultipleMetadataContributor() { + } + + /** + * @param field {@link org.dspace.importer.external.metadatamapping.MetadataFieldConfig} used in + * mapping + * @param metadatumContributors A list of MetadataContributor + */ + public MultipleMetadataContributor(MetadataFieldConfig field, List metadatumContributors) { + this.field = field; + this.metadatumContributors = (LinkedList) metadatumContributors; + } + + /** + * Set the metadatafieldMapping used in the transforming of a record to actual metadata + * + * @param metadataFieldMapping the new mapping. + */ + @Override + public void setMetadataFieldMapping(MetadataFieldMapping> metadataFieldMapping) { + for (MetadataContributor metadatumContributor : metadatumContributors) { + metadatumContributor.setMetadataFieldMapping(metadataFieldMapping); + } + } + + + /** + * a separate Metadatum object is created for each index of Metadatum returned from the calls to + * MetadatumContributor.contributeMetadata(t) for each MetadatumContributor in the metadatumContributors list. + * All of them have as dc schema/element/qualifier the values defined in MetadataFieldConfig. + * + * @param t the object we are trying to translate + * @return a collection of metadata got from each MetadataContributor + */ + @Override + public Collection contributeMetadata(T t) { + Collection values = new ArrayList<>(); + for (MetadataContributor metadatumContributor : metadatumContributors) { + Collection metadata = metadatumContributor.contributeMetadata(t); + values.addAll(metadata); + } + changeDC(values); + return values; + } + + /** + * This method does the trick of this implementation. + * It changes the DC schema/element/qualifier of the given Metadatum into + * the ones present in this contributor. + * In this way, the contributors in metadatumContributors could have any dc values, + * because this method remap them all. 
+ * + * @param the list of metadata we want to remap + */ + private void changeDC(Collection values) { + for (MetadatumDTO dto : values) { + dto.setElement(field.getElement()); + dto.setQualifier(field.getQualifier()); + dto.setSchema(field.getSchema()); + } + } + + /** + * Return the MetadataFieldConfig used while retrieving MetadatumDTO + * + * @return MetadataFieldConfig + */ + public MetadataFieldConfig getField() { + return field; + } + + /** + * Setting the MetadataFieldConfig + * + * @param field MetadataFieldConfig used while retrieving MetadatumDTO + */ + public void setField(MetadataFieldConfig field) { + this.field = field; + } + + /** + * Return the List of MetadataContributor objects set to this class + * + * @return metadatumContributors, list of MetadataContributor + */ + public List getMetadatumContributors() { + return metadatumContributors; + } + + /** + * Set the List of MetadataContributor objects set to this class + * + * @param metadatumContributors A list of MetadatumContributor classes + */ + public void setMetadatumContributors(List metadatumContributors) { + this.metadatumContributors = metadatumContributors; + } +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleMetadataContributor.java new file mode 100644 index 0000000000..1b9007f23c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleMetadataContributor.java @@ -0,0 +1,109 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; + +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.dspace.importer.external.service.components.dto.PlainMetadataKeyValueItem; +import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto; + +/** + * Metadata contributor that takes an PlainMetadataSourceDto instance and turns it into a + * collection of metadatum + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +public class SimpleMetadataContributor implements MetadataContributor { + + private MetadataFieldConfig field; + + private String key; + + private MetadataFieldMapping> metadataFieldMapping; + + public SimpleMetadataContributor(MetadataFieldConfig field, String key) { + this.field = field; + this.key = key; + } + + public SimpleMetadataContributor() { } + + /** + * Set the metadataFieldMapping of this SimpleMetadataContributor + * + * @param metadataFieldMapping the new mapping. + */ + @Override + public void setMetadataFieldMapping( + MetadataFieldMapping> metadataFieldMapping) { + this.metadataFieldMapping = metadataFieldMapping; + } + + /** + * Retrieve the metadata associated with the given object. + * It match the key found in PlainMetadataSourceDto instance with the key passed to constructor. + * In case of success, new metadatum is constructer (using field elements and PlainMetadataSourceDto value) + * and added to the list. 
+ * + * @param t A class to retrieve metadata and key to match from. t and contained list "metadata" MUST be not null. + * @return a collection of import records. Only the identifier of the found records may be put in the record. + */ + @Override + public Collection contributeMetadata(PlainMetadataSourceDto t) { + List values = new LinkedList<>(); + for (PlainMetadataKeyValueItem metadatum : t.getMetadata()) { + if (key.equals(metadatum.getKey())) { + MetadatumDTO dcValue = new MetadatumDTO(); + dcValue.setValue(metadatum.getValue()); + dcValue.setElement(field.getElement()); + dcValue.setQualifier(field.getQualifier()); + dcValue.setSchema(field.getSchema()); + values.add(dcValue); + } + } + return values; + } + + /** + * Method to inject field item + * + * @param field the {@link MetadataFieldConfig} to use in this contributor + */ + public void setField(MetadataFieldConfig field) { + this.field = field; + } + + /** + * Method to inject key value + */ + public void setKey(String key) { + this.key = key; + } + + /** + * Method to retrieve field item + */ + public String getKey() { + return key; + } + + /** + * Method to retrieve the {@link MetadataFieldConfig} used in this contributor + */ + public MetadataFieldConfig getField() { + return field; + } +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumContributor.java index ba5afceb5f..c8d2467d5f 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumContributor.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumContributor.java @@ -21,6 +21,8 @@ import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; import org.dspace.importer.external.metadatamapping.MetadatumDTO; import org.jaxen.JaxenException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Required; /** @@ -31,6 +33,8 @@ import org.springframework.beans.factory.annotation.Required; public class SimpleXpathMetadatumContributor implements MetadataContributor { private MetadataFieldConfig field; + private static final Logger log = LoggerFactory.getLogger(SimpleXpathMetadatumContributor.class); + /** * Return prefixToNamespaceMapping * @@ -79,7 +83,7 @@ public class SimpleXpathMetadatumContributor implements MetadataContributorMetadataFieldConfig + * MetadataFieldConfig */ public SimpleXpathMetadatumContributor(String query, Map prefixToNamespaceMapping, MetadataFieldConfig field) { @@ -157,12 +161,12 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor { +public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource, FileSource { + private String baseAddress; private WebTarget pubmedWebTarget; + private List supportedExtensions; + + /** + * Set the file extensions supported by this metadata service + * + * @param supportedExtensionsthe file extensions (xml,txt,...) 
supported by this service + */ + public void setSupportedExtensions(List supportedExtensions) { + this.supportedExtensions = supportedExtensions; + } + + @Override + public List getSupportedExtensions() { + return supportedExtensions; + } + /** * Find the number of records matching a query; * @@ -49,7 +77,7 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat * @throws MetadataSourceException if the underlying methods throw any exception. */ @Override - public int getNbRecords(String query) throws MetadataSourceException { + public int getRecordsCount(String query) throws MetadataSourceException { return retry(new GetNbRecords(query)); } @@ -61,7 +89,7 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat * @throws MetadataSourceException if the underlying methods throw any exception. */ @Override - public int getNbRecords(Query query) throws MetadataSourceException { + public int getRecordsCount(Query query) throws MetadataSourceException { return retry(new GetNbRecords(query)); } @@ -357,7 +385,6 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat @Override public Collection call() throws Exception { - List records = new LinkedList(); WebTarget getRecordIdsTarget = pubmedWebTarget .queryParam("term", query.getParameterAsClass("term", String.class)); @@ -382,13 +409,41 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat invocationBuilder = getRecordsTarget.request(MediaType.TEXT_PLAIN_TYPE); response = invocationBuilder.get(); - List omElements = splitToRecords(response.readEntity(String.class)); - - for (OMElement record : omElements) { - records.add(transformSourceRecords(record)); - } - - return records; + String xml = response.readEntity(String.class); + return parseXMLString(xml); } } + + + @Override + public List getRecords(InputStream inputStream) throws FileSourceException { + String xml = null; + try (Reader reader = new InputStreamReader(inputStream, "UTF-8")) { + xml = CharStreams.toString(reader); + return parseXMLString(xml); + } catch (IOException e) { + throw new FileSourceException ("Cannot read XML from InputStream", e); + } + } + + @Override + public ImportRecord getRecord(InputStream inputStream) throws FileSourceException, FileMultipleOccurencesException { + List importRecord = getRecords(inputStream); + if (importRecord == null || importRecord.isEmpty()) { + throw new FileSourceException("Cannot find (valid) record in File"); + } else if (importRecord.size() > 1) { + throw new FileMultipleOccurencesException("File contains more than one entry"); + } else { + return importRecord.get(0); + } + } + + private List parseXMLString(String xml) { + List records = new LinkedList(); + List omElements = splitToRecords(xml); + for (OMElement record : omElements) { + records.add(transformSourceRecords(record)); + } + return records; + } } diff --git a/dspace-api/src/main/java/org/dspace/importer/external/ris/service/RisImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/ris/service/RisImportMetadataSourceServiceImpl.java new file mode 100644 index 0000000000..2574e187df --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/ris/service/RisImportMetadataSourceServiceImpl.java @@ -0,0 +1,141 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * 
http://www.dspace.org/license/ + */ +package org.dspace.importer.external.ris.service; + +import java.io.BufferedReader; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import javax.annotation.Resource; + +import org.dspace.importer.external.exception.FileSourceException; +import org.dspace.importer.external.service.components.AbstractPlainMetadataSource; +import org.dspace.importer.external.service.components.dto.PlainMetadataKeyValueItem; +import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto; + +/** + * Implements a metadata importer for RIS files + * Implementations insprider by BTE DataLoader {@link https://github.com/EKT/Biblio-Transformation-Engine/blob/master/bte-io/src/main/java/gr/ekt/bteio/loaders/RISDataLoader.java} + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +public class RisImportMetadataSourceServiceImpl extends AbstractPlainMetadataSource { + + @Override + public String getImportSource() { + return "RISMetadataSource"; + } + + @Override + protected List readData(InputStream inputStream) throws FileSourceException { + return aggregateData(inputStream); + } + + /** + * This method map the data present in the inputStream, then return a list PlainMetadataSourceDto. + * Any PlainMetadataSourceDto will be used to create a single {@link org.dspace.importer.external.datamodel.ImportRecord} + * + * @see org.dspace.importer.external.service.components.AbstractPlainMetadataSource + * + * @param inputStream the inputStream of the RIS file + * @return List of {@link org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto} + * @throws FileSourceException + */ + private List aggregateData(InputStream inputStream) throws FileSourceException { + List metadata = new ArrayList<>(); + //map any line of the field to a key/value pair + List notAggregatedItems = notAggregatedData(inputStream); + List aggregatedTmpList = null; + Iterator itr = notAggregatedItems.iterator(); + // iterate over the list of key/value items + // create a new PlainMetadataSourceDto (which map and ImportRecord) + // any times the key is "TY" (content separator in RIS) + while (itr.hasNext()) { + PlainMetadataKeyValueItem item = itr.next(); + if ("TY".equals(item.getKey())) { + if (aggregatedTmpList != null) { + PlainMetadataSourceDto dto = new PlainMetadataSourceDto(); + dto.setMetadata(new ArrayList<>(aggregatedTmpList)); + metadata.add(dto); + } + aggregatedTmpList = new ArrayList<>(); + aggregatedTmpList.add(item); + } else { + if (aggregatedTmpList != null) { + aggregatedTmpList.add(item); + // save last iteration metadata + if (!itr.hasNext()) { + PlainMetadataSourceDto dto = new PlainMetadataSourceDto(); + dto.setMetadata(new ArrayList<>(aggregatedTmpList)); + metadata.add(dto); + } + } + } + } + return metadata; + } + + /** + * This method transform any row of the RIS file into a PlainMetadataKeyValueItem, + * splitting the row sequentially through a RegExp without take care of the means of the data. + * In this way, all entries present in the file are mapped in the resulting list. 
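The two steps described in these methods (each RIS row becomes a key/value item, and every "TY" item then closes the previous record and opens a new one) can be restated in a condensed, self-contained sketch. The class below is not part of the patch; it only replays the aggregation rule on a handful of already-parsed items, so the sole assumption is the behaviour documented in this changeset.

```java
import java.util.ArrayList;
import java.util.List;

import org.dspace.importer.external.service.components.dto.PlainMetadataKeyValueItem;
import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto;

public class RisAggregationSketch {

    public static void main(String[] args) {
        // Key/value items as notAggregatedData() would produce them from a two-entry RIS file
        List<PlainMetadataKeyValueItem> items = new ArrayList<>();
        items.add(item("TY", "JOUR"));
        items.add(item("TI", "First article"));
        items.add(item("TY", "JOUR"));
        items.add(item("TI", "Second article"));

        // Grouping rule: every "TY" closes the previous record and opens a new one
        List<PlainMetadataSourceDto> records = new ArrayList<>();
        List<PlainMetadataKeyValueItem> current = null;
        for (PlainMetadataKeyValueItem item : items) {
            if ("TY".equals(item.getKey())) {
                if (current != null) {
                    records.add(toDto(current));
                }
                current = new ArrayList<>();
            }
            if (current != null) {
                current.add(item);
            }
        }
        if (current != null) {
            records.add(toDto(current));
        }

        System.out.println(records.size()); // 2 records, one per RIS entry
    }

    private static PlainMetadataKeyValueItem item(String key, String value) {
        PlainMetadataKeyValueItem i = new PlainMetadataKeyValueItem();
        i.setKey(key);
        i.setValue(value);
        return i;
    }

    private static PlainMetadataSourceDto toDto(List<PlainMetadataKeyValueItem> metadata) {
        PlainMetadataSourceDto dto = new PlainMetadataSourceDto();
        dto.setMetadata(metadata);
        return dto;
    }
}
```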
+ * + * @param inputStream the inputStrem of the file + * @return A list + * @throws FileSourceException + */ + private List notAggregatedData(InputStream inputStream) throws FileSourceException { + LinkedList items = new LinkedList<>(); + BufferedReader reader; + try { + reader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8")); + String line; + while ((line = reader.readLine()) != null) { + if (line.isEmpty() || line.equals("") || line.matches("^\\s*$")) { + continue; + } + //match valid RIS entry + Pattern risPattern = Pattern.compile("^([A-Z][A-Z0-9]) - (.*)$"); + Matcher risMatcher = risPattern.matcher(line); + if (risMatcher.matches()) { + PlainMetadataKeyValueItem keyValueItem = new PlainMetadataKeyValueItem(); + keyValueItem.setValue(risMatcher.group(2)); + keyValueItem.setKey(risMatcher.group(1)); + items.add(keyValueItem); + } else { + if (!items.isEmpty()) { + items.getLast().setValue(items.getLast().getValue().concat(line)); + } + } + } + } catch (Exception e) { + throw new FileSourceException("Cannot parse RIS file", e); + } + return items; + } + + /** + * Retrieve the MetadataFieldMapping containing the mapping between RecordType + * (in this case PlainMetadataSourceDto.class) and Metadata + * + * @return The configured MetadataFieldMapping + */ + @Override + @SuppressWarnings("unchecked") + @Resource(name = "risMetadataFieldMap") + public void setMetadataFieldMap(@SuppressWarnings("rawtypes") Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/AbstractImportMetadataSourceService.java b/dspace-api/src/main/java/org/dspace/importer/external/service/AbstractImportMetadataSourceService.java index a803958a9d..3bf76438cd 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/service/AbstractImportMetadataSourceService.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/AbstractImportMetadataSourceService.java @@ -16,7 +16,6 @@ import org.dspace.importer.external.metadatamapping.contributor.MetadataContribu import org.dspace.importer.external.metadatamapping.transform.GenerateQueryService; import org.dspace.importer.external.service.components.AbstractRemoteMetadataSource; import org.dspace.importer.external.service.components.MetadataSource; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Required; /** @@ -49,7 +48,6 @@ public abstract class AbstractImportMetadataSourceService extends Ab * * @param generateQueryForItem the query generator to be used. 
*/ - @Autowired public void setGenerateQueryForItem(GenerateQueryService generateQueryForItem) { this.generateQueryForItem = generateQueryForItem; } diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/ImportService.java b/dspace-api/src/main/java/org/dspace/importer/external/service/ImportService.java index 87c2bd0029..815a10b5a7 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/service/ImportService.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/ImportService.java @@ -8,6 +8,10 @@ package org.dspace.importer.external.service; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -19,11 +23,16 @@ import org.apache.logging.log4j.Logger; import org.dspace.content.Item; import org.dspace.importer.external.datamodel.ImportRecord; import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.FileMultipleOccurencesException; +import org.dspace.importer.external.exception.FileSourceException; import org.dspace.importer.external.exception.MetadataSourceException; import org.dspace.importer.external.service.components.Destroyable; +import org.dspace.importer.external.service.components.FileSource; import org.dspace.importer.external.service.components.MetadataSource; +import org.dspace.importer.external.service.components.QuerySource; import org.springframework.beans.factory.annotation.Autowired; + /** * Main entry point for the import framework. * Instead of calling the different importer implementations, the ImportService should be called instead. @@ -32,8 +41,10 @@ import org.springframework.beans.factory.annotation.Autowired; * importer implementation you want to use. 
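Since this Javadoc describes the single entry point of the framework, a usage sketch for the new file-based import path added further down in this class may help. The ImportService instance is assumed to come out of the Spring context with its import sources already registered; the file path is purely illustrative, and which FileSource picks the file up depends on the configured supported extensions.

```java
// 'importService' is assumed to be injected from the Spring context; the path is illustrative only
File file = new File("/tmp/example.ris");
try {
    ImportRecord record = importService.getRecord(file, file.getName());
    if (record == null) {
        // no configured FileSource accepted the file (unsupported extension or unparsable content)
    }
} catch (FileMultipleOccurencesException e) {
    // the file contained more than one entry; a FileSource#getRecords(InputStream) based flow is needed instead
} catch (FileSourceException e) {
    // the file could not be read or parsed by the matching FileSource
}
```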
* * @author Roeland Dillen (roeland at atmire dot com) + * @author Pasquale Cavallo (pasquale.cavallo@4science.it) */ public class ImportService implements Destroyable { + private HashMap importSources = new HashMap<>(); Logger log = org.apache.logging.log4j.LogManager.getLogger(ImportService.class); @@ -101,11 +112,11 @@ public class ImportService implements Destroyable { public Collection findMatchingRecords(String uri, Item item) throws MetadataSourceException { try { List recordList = new LinkedList(); - for (MetadataSource metadataSource : matchingImports(uri)) { - recordList.addAll(metadataSource.findMatchingRecords(item)); + if (metadataSource instanceof QuerySource) { + recordList.addAll(((QuerySource)metadataSource).findMatchingRecords(item)); + } } - return recordList; } catch (Exception e) { throw new MetadataSourceException(e); @@ -125,9 +136,10 @@ public class ImportService implements Destroyable { try { List recordList = new LinkedList(); for (MetadataSource metadataSource : matchingImports(uri)) { - recordList.addAll(metadataSource.findMatchingRecords(query)); + if (metadataSource instanceof QuerySource) { + recordList.addAll(((QuerySource)metadataSource).findMatchingRecords(query)); + } } - return recordList; } catch (Exception e) { throw new MetadataSourceException(e); @@ -145,8 +157,10 @@ public class ImportService implements Destroyable { public int getNbRecords(String uri, String query) throws MetadataSourceException { try { int total = 0; - for (MetadataSource MetadataSource : matchingImports(uri)) { - total += MetadataSource.getNbRecords(query); + for (MetadataSource metadataSource : matchingImports(uri)) { + if (metadataSource instanceof QuerySource) { + total += ((QuerySource)metadataSource).getRecordsCount(query); + } } return total; } catch (Exception e) { @@ -165,8 +179,10 @@ public class ImportService implements Destroyable { public int getNbRecords(String uri, Query query) throws MetadataSourceException { try { int total = 0; - for (MetadataSource MetadataSource : matchingImports(uri)) { - total += MetadataSource.getNbRecords(query); + for (MetadataSource metadataSource : matchingImports(uri)) { + if (metadataSource instanceof QuerySource) { + total += ((QuerySource)metadataSource).getRecordsCount(query); + } } return total; } catch (Exception e) { @@ -189,7 +205,9 @@ public class ImportService implements Destroyable { try { List recordList = new LinkedList<>(); for (MetadataSource metadataSource : matchingImports(uri)) { - recordList.addAll(metadataSource.getRecords(query, start, count)); + if (metadataSource instanceof QuerySource) { + recordList.addAll(((QuerySource)metadataSource).getRecords(query, start, count)); + } } return recordList; } catch (Exception e) { @@ -209,7 +227,9 @@ public class ImportService implements Destroyable { try { List recordList = new LinkedList<>(); for (MetadataSource metadataSource : matchingImports(uri)) { - recordList.addAll(metadataSource.getRecords(query)); + if (metadataSource instanceof QuerySource) { + recordList.addAll(((QuerySource)metadataSource).getRecords(query)); + } } return recordList; } catch (Exception e) { @@ -229,10 +249,12 @@ public class ImportService implements Destroyable { public ImportRecord getRecord(String uri, String id) throws MetadataSourceException { try { for (MetadataSource metadataSource : matchingImports(uri)) { - if (metadataSource.getRecord(id) != null) { - return metadataSource.getRecord(id); + if (metadataSource instanceof QuerySource) { + QuerySource querySource = 
(QuerySource)metadataSource; + if (querySource.getRecord(id) != null) { + return querySource.getRecord(id); + } } - } return null; } catch (Exception e) { @@ -252,10 +274,12 @@ public class ImportService implements Destroyable { public ImportRecord getRecord(String uri, Query query) throws MetadataSourceException { try { for (MetadataSource metadataSource : matchingImports(uri)) { - if (metadataSource.getRecord(query) != null) { - return metadataSource.getRecord(query); + if (metadataSource instanceof QuerySource) { + QuerySource querySource = (QuerySource)metadataSource; + if (querySource.getRecord(query) != null) { + return querySource.getRecord(query); + } } - } return null; } catch (Exception e) { @@ -272,6 +296,41 @@ public class ImportService implements Destroyable { return importSources.keySet(); } + /* + * Get a collection of record from File, + * The first match will be return. + * + * @param file The file from which will read records + * @param originalName The original file name or full path + * @return a single record contains the metadatum + * @throws FileMultipleOccurencesException if more than one entry is found + */ + public ImportRecord getRecord(File file, String originalName) + throws FileMultipleOccurencesException, FileSourceException { + ImportRecord importRecords = null; + for (MetadataSource metadataSource : importSources.values()) { + try (InputStream fileInputStream = new FileInputStream(file)) { + if (metadataSource instanceof FileSource) { + FileSource fileSource = (FileSource)metadataSource; + if (fileSource.isValidSourceForFile(originalName)) { + importRecords = fileSource.getRecord(fileInputStream); + break; + } + } + //catch statements is required because we could have supported format (i.e. XML) + //which fail on schema validation + } catch (FileSourceException e) { + log.debug(metadataSource.getImportSource() + " isn't a valid parser for file"); + } catch (FileMultipleOccurencesException e) { + log.debug("File contains multiple metadata, return with error"); + throw e; + } catch (IOException e1) { + throw new FileSourceException("File cannot be read, may be null"); + } + } + return importRecords; + } + /** * Call destroy on all {@link Destroyable} {@link MetadataSource} objects set in this ImportService */ diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractPlainMetadataSource.java b/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractPlainMetadataSource.java new file mode 100644 index 0000000000..019cf33177 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractPlainMetadataSource.java @@ -0,0 +1,103 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.importer.external.service.components; + +import java.io.InputStream; +import java.util.ArrayList; +import java.util.List; + +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.exception.FileMultipleOccurencesException; +import org.dspace.importer.external.exception.FileSourceException; +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto; + + +/** + * This class is an abstract 
implementation of {@link MetadataSource} useful in cases + * of plain metadata sources. + * It provides the methot to mapping metadata to DSpace Format when source is a file + * whit a list of strings. + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ + +public abstract class AbstractPlainMetadataSource + extends AbstractMetadataFieldMapping + implements FileSource { + + protected abstract List + readData(InputStream fileInpuStream) throws FileSourceException; + + + private List supportedExtensions; + + /** + * Set the file extensions supported by this metadata service + * + * @param supportedExtensionsthe file extensions (xml,txt,...) supported by this service + */ + public void setSupportedExtensions(List supportedExtensions) { + this.supportedExtensions = supportedExtensions; + } + + @Override + public List getSupportedExtensions() { + return supportedExtensions; + } + + /** + * Return a list of ImportRecord constructed from input file. This list is based on + * the results retrieved from the file (InputStream) parsed through abstract method readData + * + * @param InputStream The inputStream of the file + * @return A list of {@link ImportRecord} + * @throws FileSourceException if, for any reason, the file is not parsable + */ + @Override + public List getRecords(InputStream is) throws FileSourceException { + List datas = readData(is); + List records = new ArrayList<>(); + for (PlainMetadataSourceDto item : datas) { + records.add(toRecord(item)); + } + return records; + } + + /** + * Return an ImportRecord constructed from input file. This list is based on + * the result retrieved from the file (InputStream) parsed through abstract method + * "readData" implementation + * + * @param InputStream The inputStream of the file + * @return An {@link ImportRecord} matching the file content + * @throws FileSourceException if, for any reason, the file is not parsable + * @throws FileMultipleOccurencesException if the file contains more than one entry + */ + @Override + public ImportRecord getRecord(InputStream is) throws FileSourceException, FileMultipleOccurencesException { + List datas = readData(is); + if (datas == null || datas.isEmpty()) { + throw new FileSourceException("File is empty"); + } + if (datas.size() > 1) { + throw new FileMultipleOccurencesException("File " + + "contains more than one entry (" + datas.size() + " entries"); + } + return toRecord(datas.get(0)); + } + + + private ImportRecord toRecord(PlainMetadataSourceDto entry) { + List metadata = new ArrayList<>(); + metadata.addAll(resultToDCValueMapping(entry)); + return new ImportRecord(metadata); + } +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java b/dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java new file mode 100644 index 0000000000..5bef0984df --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java @@ -0,0 +1,70 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.importer.external.service.components; + +import java.io.InputStream; +import java.util.List; + +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.exception.FileMultipleOccurencesException; +import 
org.dspace.importer.external.exception.FileSourceException; + +/** + * This interface declare the base methods to work with files containing metadata. + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +public interface FileSource extends MetadataSource { + + /** + * Get the file extensions (xml, csv, txt, ...) supported by the FileSource + */ + public List getSupportedExtensions(); + + /** + * Return a list of ImportRecord constructed from input file. + * + * @param InputStream The inputStream of the file + * @return A list of {@link ImportRecord} + * @throws FileSourceException if, for any reason, the file is not parsable + */ + public List getRecords(InputStream inputStream) + throws FileSourceException; + + /** + * Return an ImportRecord constructed from input file. + * + * @param InputStream The inputStream of the file + * @return An {@link ImportRecord} matching the file content + * @throws FileSourceException if, for any reason, the file is not parsable + * @throws FileMultipleOccurencesException if the file contains more than one entry + */ + public ImportRecord getRecord(InputStream inputStream) + throws FileSourceException, FileMultipleOccurencesException; + + /** + * This method is used to decide if the FileSource manage the file format + * + * @param originalName the file file original name + * @return true if the FileSource can parse the file, false otherwise + */ + public default boolean isValidSourceForFile(String originalName) { + List extensions = getSupportedExtensions(); + if (extensions == null || extensions.isEmpty()) { + return false; + } + if (originalName != null && originalName.contains(".")) { + String extension = originalName.substring(originalName.lastIndexOf('.') + 1, + originalName.length()); + return getSupportedExtensions().contains(extension); + } + return false; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/components/MetadataSource.java b/dspace-api/src/main/java/org/dspace/importer/external/service/components/MetadataSource.java index 79bdcfa903..353f77b798 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/service/components/MetadataSource.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/components/MetadataSource.java @@ -8,76 +8,14 @@ package org.dspace.importer.external.service.components; -import java.util.Collection; - -import org.dspace.content.Item; -import org.dspace.importer.external.datamodel.ImportRecord; -import org.dspace.importer.external.datamodel.Query; -import org.dspace.importer.external.exception.MetadataSourceException; - /** - * Common interface for all import implementations. + * Super interface for all import implementations. * * @author Roeland Dillen (roeland at atmire dot com) + * @author Pasquale Cavallo (pasquale.cavallo@4science.it) */ public interface MetadataSource { - /** - * Gets the number of records matching a query - * - * @param query the query in string format - * @return the number of records matching the query - * @throws MetadataSourceException if the underlying methods throw any exception. - */ - public int getNbRecords(String query) throws MetadataSourceException; - /** - * Gets the number of records matching a query - * - * @param query the query object - * @return the number of records matching the query - * @throws MetadataSourceException if the underlying methods throw any exception. 
- */ - public int getNbRecords(Query query) throws MetadataSourceException; - - /** - * Gets a set of records matching a query. Supports pagination - * - * @param query the query. The query will generally be posted 'as is' to the source - * @param start offset - * @param count page size - * @return a collection of fully transformed id's - * @throws MetadataSourceException if the underlying methods throw any exception. - */ - public Collection getRecords(String query, int start, int count) throws MetadataSourceException; - - /** - * Find records based on a object query. - * - * @param query a query object to base the search on. - * @return a set of records. Fully transformed. - * @throws MetadataSourceException if the underlying methods throw any exception. - */ - public Collection getRecords(Query query) throws MetadataSourceException; - - /** - * Get a single record from the source. - * The first match will be returned - * - * @param id identifier for the record - * @return a matching record - * @throws MetadataSourceException if the underlying methods throw any exception. - */ - public ImportRecord getRecord(String id) throws MetadataSourceException; - - /** - * Get a single record from the source. - * The first match will be returned - * - * @param query a query matching a single record - * @return a matching record - * @throws MetadataSourceException if the underlying methods throw any exception. - */ - public ImportRecord getRecord(Query query) throws MetadataSourceException; /** * The string that identifies this import implementation. Preferable a URI @@ -86,23 +24,4 @@ public interface MetadataSource { */ public String getImportSource(); - /** - * Finds records based on an item - * Delegates to one or more MetadataSource implementations based on the uri. Results will be aggregated. - * - * @param item an item to base the search on - * @return a collection of import records. Only the identifier of the found records may be put in the record. - * @throws MetadataSourceException if the underlying methods throw any exception. - */ - public Collection findMatchingRecords(Item item) throws MetadataSourceException; - - /** - * Finds records based on query object. - * Delegates to one or more MetadataSource implementations based on the uri. Results will be aggregated. - * - * @param query a query object to base the search on. - * @return a collection of import records. Only the identifier of the found records may be put in the record. - * @throws MetadataSourceException passed through. 
- */ - public Collection findMatchingRecords(Query query) throws MetadataSourceException; } diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/components/QuerySource.java b/dspace-api/src/main/java/org/dspace/importer/external/service/components/QuerySource.java new file mode 100644 index 0000000000..bcd10cc554 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/components/QuerySource.java @@ -0,0 +1,106 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.importer.external.service.components; + +import java.util.Collection; + +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; + + +/** + * Common interface for database-based imports. + * + * @author Roeland Dillen (roeland at atmire dot com) + * @author Pasquale Cavallo (pasquale.cavallo@4science.it) + */ + +public interface QuerySource extends MetadataSource { + + /** + * Get a single record from the source. + * The first match will be returned + * + * @param id identifier for the record + * @return a matching record + * @throws MetadataSourceException if the underlying methods throw any exception. + */ + public ImportRecord getRecord(String id) throws MetadataSourceException; + + /** + * Gets the number of records matching a query + * + * @param query the query in string format + * @return the number of records matching the query + * @throws MetadataSourceException if the underlying methods throw any exception. + */ + public int getRecordsCount(String query) throws MetadataSourceException; + + /** + * Gets the number of records matching a query + * + * @param query the query object + * @return the number of records matching the query + * @throws MetadataSourceException if the underlying methods throw any exception. + */ + public int getRecordsCount(Query query) throws MetadataSourceException; + + /** + * Gets a set of records matching a query. Supports pagination + * + * @param query the query. The query will generally be posted 'as is' to the source + * @param start offset + * @param count page size + * @return a collection of fully transformed id's + * @throws MetadataSourceException if the underlying methods throw any exception. + */ + public Collection getRecords(String query, int start, int count) throws MetadataSourceException; + + /** + * Find records based on a object query. + * + * @param query a query object to base the search on. + * @return a set of records. Fully transformed. + * @throws MetadataSourceException if the underlying methods throw any exception. + */ + public Collection getRecords(Query query) throws MetadataSourceException; + + /** + * Get a single record from the source. + * The first match will be returned + * + * @param query a query matching a single record + * @return a matching record + * @throws MetadataSourceException if the underlying methods throw any exception. + */ + public ImportRecord getRecord(Query query) throws MetadataSourceException; + + /** + * Finds records based on query object. + * Delegates to one or more MetadataSource implementations based on the uri. Results will be aggregated. + * + * @param query a query object to base the search on. 
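As a usage sketch for this interface: a query-based source, such as the PubMed implementation in this changeset, can be asked for a hit count and then for a page of records. The query string syntax is source-specific, and error handling is reduced to a single catch for brevity; the variable name is an assumption.

```java
// 'querySource' is assumed to be a configured QuerySource, e.g. the PubMed implementation in this changeset
try {
    int total = querySource.getRecordsCount("test query");                       // hit count only
    Collection<ImportRecord> page = querySource.getRecords("test query", 0, 20); // offset 0, page size 20
} catch (MetadataSourceException e) {
    // the remote call or the response parsing failed
}
```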
+ * @return a collection of import records. Only the identifier of the found records may be put in the record. + * @throws MetadataSourceException passed through. + */ + public Collection findMatchingRecords(Query query) throws MetadataSourceException; + + /** + * Finds records based on an item + * Delegates to one or more MetadataSource implementations based on the uri. Results will be aggregated. + * + * @param item an item to base the search on + * @return a collection of import records. Only the identifier of the found records may be put in the record. + * @throws MetadataSourceException if the underlying methods throw any exception. + */ + public Collection findMatchingRecords(Item item) throws MetadataSourceException; + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/components/dto/PlainMetadataKeyValueItem.java b/dspace-api/src/main/java/org/dspace/importer/external/service/components/dto/PlainMetadataKeyValueItem.java new file mode 100644 index 0000000000..fa362760b9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/components/dto/PlainMetadataKeyValueItem.java @@ -0,0 +1,50 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.service.components.dto; + +/** + * Simple object to construct items + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +public class PlainMetadataKeyValueItem { + + private String key; + private String value; + + /* + * In a key-value items, like PlainMetadata, this method get the item's key + */ + public String getKey() { + return key; + } + + /* + * In a key-value items, like PlainMetadata, this method set the item's key. + * Never set or leave this field to null + * + */ + public void setKey(String key) { + this.key = key; + } + + /* + * In key-value items, like PlainMetadata, this method get the item's value + */ + public String getValue() { + return value; + } + + /* + * In key-value items, like PlainMetadata, this method set the item's value + */ + public void setValue(String value) { + this.value = value; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/components/dto/PlainMetadataSourceDto.java b/dspace-api/src/main/java/org/dspace/importer/external/service/components/dto/PlainMetadataSourceDto.java new file mode 100644 index 0000000000..041823b027 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/components/dto/PlainMetadataSourceDto.java @@ -0,0 +1,38 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.service.components.dto; + +import java.util.List; + + +/** + * Simple object used to construct a list of items. + * This type is used in file plain metadata import as RecordType. 
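To show how this DTO is meant to be produced, here is a hypothetical plain-text source modelled on the RIS implementation above: a subclass only has to turn the file into PlainMetadataSourceDto objects, while AbstractPlainMetadataSource converts them into ImportRecord instances through the configured field mapping. The class name, the tab-separated format and the generic types are invented or inferred for illustration.

```java
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.dspace.importer.external.exception.FileSourceException;
import org.dspace.importer.external.service.components.AbstractPlainMetadataSource;
import org.dspace.importer.external.service.components.dto.PlainMetadataKeyValueItem;
import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto;

/** Hypothetical source for files holding one key/value pair per line, separated by a tab, one record per file. */
public class TabSeparatedImportMetadataSourceServiceImpl extends AbstractPlainMetadataSource {

    @Override
    public String getImportSource() {
        return "TabSeparatedMetadataSource"; // invented identifier
    }

    @Override
    protected List<PlainMetadataSourceDto> readData(InputStream inputStream) throws FileSourceException {
        List<PlainMetadataKeyValueItem> items = new ArrayList<>();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8"))) {
            String line;
            while ((line = reader.readLine()) != null) {
                String[] parts = line.split("\t", 2);
                if (parts.length == 2) {
                    PlainMetadataKeyValueItem item = new PlainMetadataKeyValueItem();
                    item.setKey(parts[0]);
                    item.setValue(parts[1]);
                    items.add(item);
                }
            }
        } catch (IOException e) {
            throw new FileSourceException("Cannot parse tab-separated file", e);
        }
        PlainMetadataSourceDto dto = new PlainMetadataSourceDto();
        dto.setMetadata(items);
        return Collections.singletonList(dto);
    }
}
```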
+ * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ + +public class PlainMetadataSourceDto { + + private List metadata; + + /* + * Method used to get the Metadata list + */ + public List getMetadata() { + return metadata; + } + + /* + * Method used to set the metadata list + */ + public void setMetadata(List metadata) { + this.metadata = metadata; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/license/CCLicense.java b/dspace-api/src/main/java/org/dspace/license/CCLicense.java index b015e3a9d3..d5d9fe14a2 100644 --- a/dspace-api/src/main/java/org/dspace/license/CCLicense.java +++ b/dspace-api/src/main/java/org/dspace/license/CCLicense.java @@ -8,6 +8,8 @@ package org.dspace.license; +import java.util.List; + /** * @author wbossons */ @@ -15,17 +17,17 @@ public class CCLicense { private String licenseName; private String licenseId; - private int order = 0; + private List ccLicenseFieldList; public CCLicense() { super(); } - public CCLicense(String licenseId, String licenseName, int order) { + public CCLicense(String licenseId, String licenseName, List ccLicenseFieldList) { super(); this.licenseId = licenseId; this.licenseName = licenseName; - this.order = order; + this.ccLicenseFieldList = ccLicenseFieldList; } public String getLicenseName() { @@ -44,13 +46,19 @@ public class CCLicense { this.licenseId = licenseId; } - public int getOrder() { - return this.order; + /** + * Gets the list of CC License Fields + * @return the list of CC License Fields + */ + public List getCcLicenseFieldList() { + return ccLicenseFieldList; } - public void setOrder(int order) { - this.order = order; + /** + * Sets the list of CC License Fields + * @param ccLicenseFieldList + */ + public void setCcLicenseFieldList(final List ccLicenseFieldList) { + this.ccLicenseFieldList = ccLicenseFieldList; } - - } diff --git a/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorService.java b/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorService.java new file mode 100644 index 0000000000..0c061d2d64 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorService.java @@ -0,0 +1,60 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.license; + +import java.io.IOException; +import java.util.Map; + +import org.jdom.Document; + +/** + * Service interface class for the Creative commons license connector service. 
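A hedged usage sketch for this connector, assuming the implementation below is injected by Spring and that cc.api.rooturl points at a reachable CC API instance. The license id and the field ids used in the answer map ("commercial", "derivatives") are illustrative only and depend on the fields the selected license class actually declares.

```java
// 'ccLicenseConnectorService' is assumed to be injected from the Spring context
Map<String, CCLicense> licenses = ccLicenseConnectorService.retrieveLicenses("en");
CCLicense chosen = licenses.get("standard"); // illustrative key; use an id actually present in the map

Map<String, String> answers = new HashMap<>();
answers.put("commercial", "n");   // illustrative field id and answer
answers.put("derivatives", "sa"); // illustrative field id and answer

String licenseUri = ccLicenseConnectorService.retrieveRightsByQuestion(chosen.getLicenseId(), "en", answers);
try {
    Document rdf = ccLicenseConnectorService.retrieveLicenseRDFDoc(licenseUri);
    String licenseName = ccLicenseConnectorService.retrieveLicenseName(rdf);
} catch (IOException e) {
    // the CC API could not be reached
}
```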
+ * The implementation of this class is responsible for all the calls to the CC license API and parsing the response + * The service is autowired by spring + */ +public interface CCLicenseConnectorService { + + /** + * Retrieves the CC Licenses for the provided language from the CC License API + * + * @param language - the language to retrieve the licenses for + * @return a map of licenses with the id and the license for the provided language + */ + public Map retrieveLicenses(String language); + + /** + * Retrieve the CC License URI based on the provided license id, language and answers to the field questions from + * the CC License API + * + * @param licenseId - the ID of the license + * @param language - the language for which to retrieve the full answerMap + * @param answerMap - the answers to the different field questions + * @return the CC License URI + */ + public String retrieveRightsByQuestion(String licenseId, + String language, + Map answerMap); + + /** + * Retrieve the license RDF document based on the license URI + * + * @param licenseURI - The license URI for which to retrieve the license RDF document + * @return the license RDF document + * @throws IOException + */ + public Document retrieveLicenseRDFDoc(String licenseURI) throws IOException; + + /** + * Retrieve the license Name from the license document + * + * @param doc - The license document from which to retrieve the license name + * @return the license name + */ + public String retrieveLicenseName(final Document doc); + +} diff --git a/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorServiceImpl.java b/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorServiceImpl.java new file mode 100644 index 0000000000..792c25d629 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorServiceImpl.java @@ -0,0 +1,375 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.license; + +import java.io.IOException; +import java.io.InputStream; +import java.io.StringReader; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.URLConnection; +import java.text.MessageFormat; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + +import org.apache.commons.lang3.ArrayUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.HttpEntity; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.mime.MultipartEntityBuilder; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.util.EntityUtils; +import org.apache.logging.log4j.Logger; +import org.dspace.services.ConfigurationService; +import org.jaxen.JaxenException; +import org.jaxen.jdom.JDOMXPath; +import org.jdom.Attribute; +import org.jdom.Document; +import org.jdom.Element; +import org.jdom.JDOMException; +import org.jdom.input.SAXBuilder; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.beans.factory.annotation.Autowired; +import org.xml.sax.InputSource; + +/** + * Implementation for the Creative commons license connector service. 
+ * This class is responsible for all the calls to the CC license API and parsing the response + */ +public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService, InitializingBean { + + private Logger log = org.apache.logging.log4j.LogManager.getLogger(CCLicenseConnectorServiceImpl.class); + + private CloseableHttpClient client; + protected SAXBuilder parser = new SAXBuilder(); + + private String postArgument = "answers"; + private String postAnswerFormat = + " " + + "{1}" + + "" + + "{2}" + + "" + + ""; + + + @Autowired + private ConfigurationService configurationService; + + @Override + public void afterPropertiesSet() throws Exception { + HttpClientBuilder builder = HttpClientBuilder.create(); + + client = builder + .disableAutomaticRetries() + .setMaxConnTotal(5) + .build(); + + // disallow DTD parsing to ensure no XXE attacks can occur. + // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html + parser.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); + } + + /** + * Retrieves the CC Licenses for the provided language from the CC License API + * + * @param language - the language to retrieve the licenses for + * @return a map of licenses with the id and the license for the provided language + */ + public Map retrieveLicenses(String language) { + String ccLicenseUrl = configurationService.getProperty("cc.api.rooturl"); + + String uri = ccLicenseUrl + "/?locale=" + language; + HttpGet httpGet = new HttpGet(uri); + + List licenses; + try (CloseableHttpResponse response = client.execute(httpGet)) { + licenses = retrieveLicenses(response); + } catch (JDOMException | JaxenException | IOException e) { + log.error("Error while retrieving the license details using url: " + uri, e); + licenses = Collections.emptyList(); + } + + Map ccLicenses = new HashMap<>(); + + for (String license : licenses) { + + String licenseUri = ccLicenseUrl + "/license/" + license; + HttpGet licenseHttpGet = new HttpGet(licenseUri); + try (CloseableHttpResponse response = client.execute(licenseHttpGet)) { + CCLicense ccLicense = retrieveLicenseObject(license, response); + ccLicenses.put(ccLicense.getLicenseId(), ccLicense); + } catch (JaxenException | JDOMException | IOException e) { + log.error("Error while retrieving the license details using url: " + licenseUri, e); + } + } + + return ccLicenses; + } + + /** + * Retrieve the list of licenses from the response from the CC License API and remove the licenses configured + * to be excluded + * + * @param response The response from the API + * @return a list of license identifiers for which details need to be retrieved + * @throws IOException + * @throws JaxenException + * @throws JDOMException + */ + private List retrieveLicenses(CloseableHttpResponse response) + throws IOException, JaxenException, JDOMException { + + List domains = new LinkedList<>(); + String[] excludedLicenses = configurationService.getArrayProperty("cc.license.classfilter"); + + + String responseString = EntityUtils.toString(response.getEntity()); + JDOMXPath licenseClassXpath = new JDOMXPath("//licenses/license"); + + + try (StringReader stringReader = new StringReader(responseString)) { + InputSource is = new InputSource(stringReader); + org.jdom.Document classDoc = this.parser.build(is); + + List elements = licenseClassXpath.selectNodes(classDoc); + for (Element element : elements) { + String licenseId = getSingleNodeValue(element, "@id"); + if (StringUtils.isNotBlank(licenseId) && 
!ArrayUtils.contains(excludedLicenses, licenseId)) { + domains.add(licenseId); + } + } + } + + return domains; + + } + + /** + * Parse the response for a single CC License and return the corresponding CC License Object + * + * @param licenseId the license id of the CC License to retrieve + * @param response for a specific CC License response + * @return the corresponding CC License Object + * @throws IOException + * @throws JaxenException + * @throws JDOMException + */ + private CCLicense retrieveLicenseObject(final String licenseId, CloseableHttpResponse response) + throws IOException, JaxenException, JDOMException { + + String responseString = EntityUtils.toString(response.getEntity()); + + + JDOMXPath licenseClassXpath = new JDOMXPath("//licenseclass"); + JDOMXPath licenseFieldXpath = new JDOMXPath("field"); + + + try (StringReader stringReader = new StringReader(responseString)) { + InputSource is = new InputSource(stringReader); + + org.jdom.Document classDoc = this.parser.build(is); + + Object element = licenseClassXpath.selectSingleNode(classDoc); + String licenseLabel = getSingleNodeValue(element, "label"); + + List ccLicenseFields = new LinkedList<>(); + + List licenseFields = licenseFieldXpath.selectNodes(element); + for (Element licenseField : licenseFields) { + CCLicenseField ccLicenseField = parseLicenseField(licenseField); + ccLicenseFields.add(ccLicenseField); + } + + return new CCLicense(licenseId, licenseLabel, ccLicenseFields); + } + } + + private CCLicenseField parseLicenseField(final Element licenseField) throws JaxenException { + String id = getSingleNodeValue(licenseField, "@id"); + String label = getSingleNodeValue(licenseField, "label"); + String description = getSingleNodeValue(licenseField, "description"); + + JDOMXPath enumXpath = new JDOMXPath("enum"); + List enums = enumXpath.selectNodes(licenseField); + + List ccLicenseFieldEnumList = new LinkedList<>(); + + for (Element enumElement : enums) { + CCLicenseFieldEnum ccLicenseFieldEnum = parseEnum(enumElement); + ccLicenseFieldEnumList.add(ccLicenseFieldEnum); + } + + return new CCLicenseField(id, label, description, ccLicenseFieldEnumList); + + } + + private CCLicenseFieldEnum parseEnum(final Element enumElement) throws JaxenException { + String id = getSingleNodeValue(enumElement, "@id"); + String label = getSingleNodeValue(enumElement, "label"); + String description = getSingleNodeValue(enumElement, "description"); + + return new CCLicenseFieldEnum(id, label, description); + } + + + private String getNodeValue(final Object el) { + if (el instanceof Element) { + return ((Element) el).getValue(); + } else if (el instanceof Attribute) { + return ((Attribute) el).getValue(); + } else if (el instanceof String) { + return (String) el; + } else { + return null; + } + } + + private String getSingleNodeValue(final Object t, String query) throws JaxenException { + JDOMXPath xpath = new JDOMXPath(query); + Object singleNode = xpath.selectSingleNode(t); + + return getNodeValue(singleNode); + } + + /** + * Retrieve the CC License URI based on the provided license id, language and answers to the field questions from + * the CC License API + * + * @param licenseId - the ID of the license + * @param language - the language for which to retrieve the full answerMap + * @param answerMap - the answers to the different field questions + * @return the CC License URI + */ + public String retrieveRightsByQuestion(String licenseId, + String language, + Map answerMap) { + + String ccLicenseUrl = 
configurationService.getProperty("cc.api.rooturl"); + + + HttpPost httpPost = new HttpPost(ccLicenseUrl + "/license/" + licenseId + "/issue"); + + + String answers = createAnswerString(answerMap); + MultipartEntityBuilder builder = MultipartEntityBuilder.create(); + String text = MessageFormat.format(postAnswerFormat, licenseId, language, answers); + builder.addTextBody(postArgument, text); + + HttpEntity multipart = builder.build(); + + httpPost.setEntity(multipart); + + try (CloseableHttpResponse response = client.execute(httpPost)) { + return retrieveLicenseUri(response); + } catch (JDOMException | JaxenException | IOException e) { + log.error("Error while retrieving the license uri for license : " + licenseId + " with answers " + + answerMap.toString(), e); + } + return null; + } + + /** + * Parse the response for the CC License URI request and return the corresponding CC License URI + * + * @param response for a specific CC License URI response + * @return the corresponding CC License URI as a string + * @throws IOException + * @throws JaxenException + * @throws JDOMException + */ + private String retrieveLicenseUri(final CloseableHttpResponse response) + throws IOException, JaxenException, JDOMException { + + String responseString = EntityUtils.toString(response.getEntity()); + JDOMXPath licenseClassXpath = new JDOMXPath("//result/license-uri"); + + + try (StringReader stringReader = new StringReader(responseString)) { + InputSource is = new InputSource(stringReader); + org.jdom.Document classDoc = this.parser.build(is); + + Object node = licenseClassXpath.selectSingleNode(classDoc); + String nodeValue = getNodeValue(node); + + if (StringUtils.isNotBlank(nodeValue)) { + return nodeValue; + } + } + return null; + } + + private String createAnswerString(final Map parameterMap) { + StringBuilder sb = new StringBuilder(); + for (String key : parameterMap.keySet()) { + sb.append("<"); + sb.append(key); + sb.append(">"); + sb.append(parameterMap.get(key)); + sb.append(""); + } + return sb.toString(); + } + + /** + * Retrieve the license RDF document based on the license URI + * + * @param licenseURI - The license URI for which to retrieve the license RDF document + * @return the license RDF document + * @throws IOException + */ + @Override + public Document retrieveLicenseRDFDoc(String licenseURI) throws IOException { + String ccLicenseUrl = configurationService.getProperty("cc.api.rooturl"); + + String issueUrl = ccLicenseUrl + "/details?license-uri=" + licenseURI; + + URL request_url; + try { + request_url = new URL(issueUrl); + } catch (MalformedURLException e) { + return null; + } + URLConnection connection = request_url.openConnection(); + connection.setDoOutput(true); + try { + // parsing document from input stream + InputStream stream = connection.getInputStream(); + Document doc = parser.build(stream); + return doc; + + } catch (Exception e) { + log.error("Error while retrieving the license document for URI: " + licenseURI, e); + } + return null; + } + + /** + * Retrieve the license Name from the license document + * + * @param doc - The license document from which to retrieve the license name + * @return the license name + */ + public String retrieveLicenseName(final Document doc) { + try { + return getSingleNodeValue(doc, "//result/license-name"); + } catch (JaxenException e) { + log.error("Error while retrieving the license name from the license document", e); + } + return null; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/license/CCLicenseField.java 
b/dspace-api/src/main/java/org/dspace/license/CCLicenseField.java index 6360249f65..8fb6de5478 100644 --- a/dspace-api/src/main/java/org/dspace/license/CCLicenseField.java +++ b/dspace-api/src/main/java/org/dspace/license/CCLicenseField.java @@ -7,8 +7,7 @@ */ package org.dspace.license; -import java.util.HashMap; -import java.util.Map; +import java.util.List; /** * Wrapper class for representation of a license field declaration. @@ -22,7 +21,7 @@ public class CCLicenseField { private String description = ""; private String type = ""; - private HashMap fieldEnum = null; + private List fieldEnum = null; /** * Construct a new LicenseField class. Note that after construction, @@ -31,13 +30,11 @@ public class CCLicenseField { * @param id The unique identifier for this field; this value will be used in constructing the answers XML. * @param label The label to use when generating the user interface. */ - public CCLicenseField(String id, String label) { - super(); - - this.fieldEnum = new HashMap(); - + public CCLicenseField(String id, String label, String description, List fieldEnum) { this.id = id; this.label = label; + this.description = description; + this.fieldEnum = fieldEnum; } /** @@ -90,16 +87,12 @@ public class CCLicenseField { } /** - * @return Returns an instance implementing the Map interface; - * the instance contains a mapping from identifiers to - * labels for the enumeration values. - * @see Map + * Returns the list of enums of this field + * @return the list of enums of this field */ - public Map getEnum() { - return this.fieldEnum; + public List getFieldEnum() { + return fieldEnum; } - - } diff --git a/dspace-api/src/main/java/org/dspace/license/CCLicenseFieldEnum.java b/dspace-api/src/main/java/org/dspace/license/CCLicenseFieldEnum.java new file mode 100644 index 0000000000..628fcb8354 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/license/CCLicenseFieldEnum.java @@ -0,0 +1,82 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.license; + +import org.apache.commons.lang3.StringUtils; + +/** + * Wrapper class for representation of a license field enum declaration. 
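The reworked model is a plain object graph, so a small construction sketch may clarify it: a CCLicense now carries its question fields directly, and each field carries the list of allowed answers instead of the old Map-based enum. All ids, labels and descriptions below are illustrative only.

```java
// One allowed answer to a field question
CCLicenseFieldEnum yes = new CCLicenseFieldEnum("y", "Yes", "Allow commercial uses of your work");
CCLicenseFieldEnum no = new CCLicenseFieldEnum("n", "No", "Do not allow commercial uses of your work");

// A field question together with its allowed answers
CCLicenseField commercial = new CCLicenseField(
        "commercial", "Commercial use", "May others use your work commercially?",
        Arrays.asList(yes, no));

// A license exposing its fields directly instead of an ordering integer
CCLicense license = new CCLicense("standard", "Creative Commons", Arrays.asList(commercial));

for (CCLicenseField field : license.getCcLicenseFieldList()) {
    for (CCLicenseFieldEnum answer : field.getFieldEnum()) {
        System.out.println(answer.getId() + " = " + answer.getLabel());
    }
}
```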
+ * A field enum is a single "answer" to the field question + */ +public class CCLicenseFieldEnum { + + private String id = ""; + private String label = ""; + private String description = ""; + + public CCLicenseFieldEnum(String id, String label, String description) { + if (StringUtils.isNotBlank(id)) { + this.id = id; + } + if (StringUtils.isNotBlank(label)) { + this.label = label; + } + if (StringUtils.isNotBlank(description)) { + this.description = description; + } + + } + + /** + * Get the id of this enum + * @return the id of this enum + */ + public String getId() { + return id; + } + + /** + * Set the id of this enum + * @param id + */ + public void setId(final String id) { + this.id = id; + } + + /** + * Get the label of this enum + * @return the label of this enum + */ + public String getLabel() { + return label; + } + + /** + * Set the label of this enum + * @param label + */ + public void setLabel(final String label) { + this.label = label; + } + + /** + * Get the description of this enum + * @return the description of this enum + */ + public String getDescription() { + return description; + } + + /** + * Set the description of this enum + * @param description + */ + public void setDescription(final String description) { + this.description = description; + } +} diff --git a/dspace-api/src/main/java/org/dspace/license/CCLookup.java b/dspace-api/src/main/java/org/dspace/license/CCLookup.java deleted file mode 100644 index c86aa78301..0000000000 --- a/dspace-api/src/main/java/org/dspace/license/CCLookup.java +++ /dev/null @@ -1,435 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.license; - -import java.io.IOException; -import java.io.OutputStreamWriter; -import java.io.UnsupportedEncodingException; -import java.net.MalformedURLException; -import java.net.URL; -import java.net.URLConnection; -import java.net.URLEncoder; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.NoSuchElementException; - -import org.apache.logging.log4j.Logger; -import org.dspace.license.factory.LicenseServiceFactory; -import org.dspace.license.service.CreativeCommonsService; -import org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; -import org.jaxen.JaxenException; -import org.jaxen.jdom.JDOMXPath; -import org.jdom.Attribute; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.input.SAXBuilder; - - -/** - * A wrapper around Creative Commons REST web services. 
- * - * @author Wendy Bossons - */ -public class CCLookup { - - /** - * log4j logger - */ - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(CCLookup.class); - - private String cc_root; - private String jurisdiction; - private List lcFilter = new ArrayList(); - - private Document license_doc = null; - private String rdfString = null; - private String errorMessage = null; - private boolean success = false; - - private SAXBuilder parser = new SAXBuilder(); - private List licenses = new ArrayList(); - private List licenseFields = new ArrayList(); - - protected CreativeCommonsService creativeCommonsService = LicenseServiceFactory.getInstance() - .getCreativeCommonsService(); - - /** - * Constructs a new instance with the default web services root. - */ - public CCLookup() { - super(); - - ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); - - cc_root = configurationService.getProperty("cc.api.rooturl"); - - String jurisProp = configurationService.getProperty("cc.license.jurisdiction"); - jurisdiction = (jurisProp != null) ? jurisProp : ""; - - String[] filters = configurationService.getArrayProperty("cc.license.classfilter"); - if (filters != null) { - for (String name : filters) { - lcFilter.add(name.trim()); - } - } - } - - /** - * Returns the id for a particular CCLicense label. Returns an - * empty string if no match is found. - * - * @param class_label The CCLicense label to find. - * @return Returns a String containing the License class ID if the label - * is found; if not found, returns an empty string. - * @see CCLicense - */ - public String getLicenseId(String class_label) { - for (int i = 0; i < this.licenses.size(); i++) { - if (((CCLicense) this.licenses.get(i)).getLicenseName().equals(class_label)) { - return ((CCLicense) this.licenses.get(i)).getLicenseId(); - } - } - - return ""; - } - - /** - * Queries the web service for the available licenses. - * - * @param language The language to request labels and description strings in. - * @return Returns a Map of CCLicense objects. - * @see Map - * @see CCLicense - */ - public Collection getLicenses(String language) { - - // create XPath expressions - try { - JDOMXPath xp_Licenses = new JDOMXPath("//licenses/license"); - JDOMXPath xp_LicenseID = new JDOMXPath("@id"); - URL classUrl = new URL(this.cc_root + "/?locale=" + language); - Document classDoc = this.parser.build(classUrl); - // extract the identifiers and labels using XPath - List results = xp_Licenses.selectNodes(classDoc); - // populate licenses container - this.licenses.clear(); - for (int i = 0; i < results.size(); i++) { - Element license = results.get(i); - // add if not filtered - String liD = ((Attribute) xp_LicenseID.selectSingleNode(license)).getValue(); - if (!lcFilter.contains(liD)) { - this.licenses.add(new CCLicense(liD, license.getText(), i)); - } - } - } catch (JaxenException jaxen_e) { - return null; - } catch (JDOMException jdom_e) { - return null; - } catch (IOException io_e) { - return null; - } catch (Exception e) { - // do nothing... but we should - return null; - } - - return licenses; - } - - - /** - * Queries the web service for a set of licenseFields for a particular license class. - * - * @param license A String specifying the CCLicense identifier to - * retrieve fields for. - * @param language the locale string - * @return A Collection of LicenseField objects. 
- * @see CCLicense - */ - public Collection getLicenseFields(String license, String language) { - - JDOMXPath xp_LicenseField; - JDOMXPath xp_LicenseID; - JDOMXPath xp_FieldType; - JDOMXPath xp_Description; - JDOMXPath xp_Label; - JDOMXPath xp_Enum; - - Document fieldDoc; - - URL classUrl; - List results = null; - List enumOptions = null; - - // create XPath expressions - try { - xp_LicenseField = new JDOMXPath("//field"); - xp_LicenseID = new JDOMXPath("@id"); - xp_Description = new JDOMXPath("description"); - xp_Label = new JDOMXPath("label"); - xp_FieldType = new JDOMXPath("type"); - xp_Enum = new JDOMXPath("enum"); - - } catch (JaxenException e) { - return null; - } - - // retrieve and parse the license class document - try { - classUrl = new URL(this.cc_root + "/license/" + license + "?locale=" + language); - } catch (Exception err) { - // do nothing... but we should - return null; - } - - // parse the licenses document - try { - fieldDoc = this.parser.build(classUrl); - } catch (JDOMException e) { - return null; - } catch (IOException e) { - return null; - } - - // reset the field definition container - this.licenseFields.clear(); - - // extract the identifiers and labels using XPath - try { - results = xp_LicenseField.selectNodes(fieldDoc); - } catch (JaxenException e) { - return null; - } - - for (int i = 0; i < results.size(); i++) { - Element field = (Element) results.get(i); - - try { - // create the field object - CCLicenseField cclicensefield = new CCLicenseField( - ((Attribute) xp_LicenseID.selectSingleNode(field)).getValue(), - ((Element) xp_Label.selectSingleNode(field)).getText()); - - // extract additional properties - cclicensefield.setDescription(((Element) xp_Description.selectSingleNode(field)).getText()); - cclicensefield.setType(((Element) xp_FieldType.selectSingleNode(field)).getText()); - - enumOptions = xp_Enum.selectNodes(field); - - for (int j = 0; j < enumOptions.size(); j++) { - String id = ((Attribute) xp_LicenseID.selectSingleNode(enumOptions.get(j))).getValue(); - String label = ((Element) xp_Label.selectSingleNode(enumOptions.get(j))).getText(); - - cclicensefield.getEnum().put(id, label); - - } // for each enum option - - this.licenseFields.add(cclicensefield); - } catch (JaxenException e) { - return null; - } - } - - return licenseFields; - } // licenseFields - - /** - * Passes a set of "answers" to the web service and retrieves a license. - * - * @param licenseId The identifier of the license class being requested. - * @param answers A Map containing the answers to the license fields; - * each key is the identifier of a LicenseField, with the value - * containing the user-supplied answer. - * @param lang The language to request localized elements in. 
- * @throws IOException if IO error - * @see CCLicense - * @see Map - */ - public void issue(String licenseId, Map answers, String lang) - throws IOException { - - // Determine the issue URL - String issueUrl = this.cc_root + "/license/" + licenseId + "/issue"; - // Assemble the "answers" document - String answer_doc = "\n" + lang + "\n" + "\n"; - Iterator keys = answers.keySet().iterator(); - - try { - String current = (String) keys.next(); - - while (true) { - answer_doc += "<" + current + ">" + (String) answers.get(current) + "\n"; - current = (String) keys.next(); - } - - - } catch (NoSuchElementException e) { - // exception indicates we've iterated through the - // entire collection; just swallow and continue - } - // answer_doc += "\n"; FAILS with jurisdiction argument - answer_doc += "\n\n"; - String post_data; - - try { - post_data = URLEncoder.encode("answers", "UTF-8") + "=" + URLEncoder.encode(answer_doc, "UTF-8"); - } catch (UnsupportedEncodingException e) { - return; - } - - URL post_url; - try { - post_url = new URL(issueUrl); - } catch (MalformedURLException e) { - return; - } - URLConnection connection = post_url.openConnection(); - // this will not be needed after I'm done TODO: remove - connection.setDoOutput(true); - OutputStreamWriter writer = new OutputStreamWriter(connection.getOutputStream()); - writer.write(post_data); - writer.flush(); - // end TODO - try { - // parsing document from input stream - java.io.InputStream stream = connection.getInputStream(); - this.license_doc = this.parser.build(stream); - } catch (JDOMException jde) { - log.warn(jde.getMessage()); - } catch (Exception e) { - log.warn(e.getCause()); - } - return; - } // issue - - /** - * Passes a set of "answers" to the web service and retrieves a license. - * - * @param licenseURI The uri of the license. - * - * Note: does not support localization in 1.5 -- not yet - * @throws IOException if IO error - * @see CCLicense - * @see Map - */ - public void issue(String licenseURI) - throws IOException { - - // Determine the issue URL - // Example: http://api.creativecommons.org/rest/1.5/details? - // license-uri=http://creativecommons.org/licenses/by-nc-sa/3.0/ - String issueUrl = cc_root + "/details?license-uri=" + licenseURI; - - URL request_url; - try { - request_url = new URL(issueUrl); - } catch (MalformedURLException e) { - return; - } - URLConnection connection = request_url.openConnection(); - // this will not be needed after I'm done TODO: remove - connection.setDoOutput(true); - try { - // parsing document from input stream - java.io.InputStream stream = connection.getInputStream(); - license_doc = this.parser.build(stream); - } catch (JDOMException jde) { - log.warn(jde.getMessage()); - } catch (Exception e) { - log.warn(e.getCause()); - } - return; - } // issue - - /** - * Retrieves the URI for the license issued. - * - * @return A String containing the URI for the license issued. - */ - public String getLicenseUrl() { - String text = null; - try { - JDOMXPath xp_LicenseName = new JDOMXPath("//result/license-uri"); - text = ((Element) xp_LicenseName.selectSingleNode(this.license_doc)).getText(); - } catch (Exception e) { - log.warn(e.getMessage()); - setSuccess(false); - text = "An error occurred getting the license - uri."; - } finally { - return text; - } - } // getLicenseUrl - - /** - * Retrieves the human readable name for the license issued. - * - * @return A String containing the license name. 
- */ - public String getLicenseName() { - String text = null; - try { - JDOMXPath xp_LicenseName = new JDOMXPath("//result/license-name"); - text = ((Element) xp_LicenseName.selectSingleNode(this.license_doc)).getText(); - } catch (Exception e) { - log.warn(e.getMessage()); - setSuccess(false); - text = "An error occurred on the license name."; - } finally { - return text; - } - } // getLicenseName - - - public org.jdom.Document getLicenseDocument() { - return this.license_doc; - } - - public String getRdf() - throws IOException { - String result = ""; - try { - result = creativeCommonsService.fetchLicenseRDF(license_doc); - } catch (Exception e) { - log.warn("An error occurred getting the rdf . . ." + e.getMessage()); - setSuccess(false); - } - return result; - } - - public boolean isSuccess() { - setSuccess(false); - JDOMXPath xp_Success; - String text = null; - try { - xp_Success = new JDOMXPath("//message"); - text = ((Element) xp_Success.selectSingleNode(this.license_doc)).getText(); - setErrorMessage(text); - } catch (Exception e) { - log.warn("There was an issue . . . " + text); - setSuccess(true); - } - return this.success; - } - - private void setSuccess(boolean success) { - this.success = success; - } - - public String getErrorMessage() { - return this.errorMessage; - } - - private void setErrorMessage(String errorMessage) { - this.errorMessage = errorMessage; - } - -} diff --git a/dspace-api/src/main/java/org/dspace/license/CreativeCommonsServiceImpl.java b/dspace-api/src/main/java/org/dspace/license/CreativeCommonsServiceImpl.java index 384b82ddc3..40e727d9df 100644 --- a/dspace-api/src/main/java/org/dspace/license/CreativeCommonsServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/license/CreativeCommonsServiceImpl.java @@ -13,7 +13,10 @@ import java.io.IOException; import java.io.InputStream; import java.io.StringWriter; import java.sql.SQLException; +import java.util.HashMap; +import java.util.LinkedList; import java.util.List; +import java.util.Map; import javax.xml.transform.Templates; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerException; @@ -82,9 +85,18 @@ public class CreativeCommonsServiceImpl implements CreativeCommonsService, Initi protected BundleService bundleService; @Autowired(required = true) protected ItemService itemService; + @Autowired + protected CCLicenseConnectorService ccLicenseConnectorService; protected ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + private String defaultLanguage; + private String jurisdiction; + private static final String JURISDICTION_KEY = "jurisdiction"; + + + private Map> ccLicenses; + protected CreativeCommonsServiceImpl() { } @@ -101,10 +113,14 @@ public class CreativeCommonsServiceImpl implements CreativeCommonsService, Initi System.setProperty("http.proxyPort", proxyPort); } + ccLicenses = new HashMap<>(); + defaultLanguage = configurationService.getProperty("cc.license.locale", "en"); + jurisdiction = configurationService.getProperty("cc.license.jurisdiction", ""); + try { templates = TransformerFactory.newInstance().newTemplates( - new StreamSource(CreativeCommonsServiceImpl.class - .getResourceAsStream("CreativeCommons.xsl"))); + new StreamSource(CreativeCommonsServiceImpl.class + .getResourceAsStream("CreativeCommons.xsl"))); } catch (TransformerConfigurationException e) { throw new RuntimeException(e.getMessage(), e); } @@ -112,15 +128,10 @@ public class CreativeCommonsServiceImpl implements 
CreativeCommonsService, Initi } - @Override - public boolean isEnabled() { - return true; - } - // create the CC bundle if it doesn't exist // If it does, remove it and create a new one. protected Bundle getCcBundle(Context context, Item item) - throws SQLException, AuthorizeException, IOException { + throws SQLException, AuthorizeException, IOException { List bundles = itemService.getBundles(item, CC_BUNDLE_NAME); if ((bundles.size() > 0) && (bundles.get(0) != null)) { @@ -131,8 +142,8 @@ public class CreativeCommonsServiceImpl implements CreativeCommonsService, Initi @Override public void setLicenseRDF(Context context, Item item, String licenseRdf) - throws SQLException, IOException, - AuthorizeException { + throws SQLException, IOException, + AuthorizeException { Bundle bundle = getCcBundle(context, item); // set the format BitstreamFormat bs_rdf_format = bitstreamFormatService.findByShortDescription(context, "RDF XML"); @@ -144,7 +155,7 @@ public class CreativeCommonsServiceImpl implements CreativeCommonsService, Initi @Override public void setLicense(Context context, Item item, InputStream licenseStm, String mimeType) - throws SQLException, IOException, AuthorizeException { + throws SQLException, IOException, AuthorizeException { Bundle bundle = getCcBundle(context, item); // set the format @@ -160,17 +171,26 @@ public class CreativeCommonsServiceImpl implements CreativeCommonsService, Initi Bitstream bs = bitstreamService.create(context, bundle, licenseStm); bs.setSource(context, CC_BS_SOURCE); bs.setName(context, (mimeType != null && - (mimeType.equalsIgnoreCase("text/xml") || - mimeType.equalsIgnoreCase("text/rdf"))) ? - BSN_LICENSE_RDF : BSN_LICENSE_TEXT); + (mimeType.equalsIgnoreCase("text/xml") || + mimeType.equalsIgnoreCase("text/rdf"))) ? 
+ BSN_LICENSE_RDF : BSN_LICENSE_TEXT); bs.setFormat(context, bs_format); bitstreamService.update(context, bs); } + /** + * Removes the license file from the item + * + * @param context - The relevant DSpace Context + * @param item - The item from which the license file needs to be removed + * @throws SQLException + * @throws IOException + * @throws AuthorizeException + */ @Override - public void removeLicense(Context context, Item item) - throws SQLException, IOException, AuthorizeException { + public void removeLicenseFile(Context context, Item item) + throws SQLException, IOException, AuthorizeException { // remove CC license bundle if one exists List bundles = itemService.getBundles(item, CC_BUNDLE_NAME); @@ -179,66 +199,74 @@ public class CreativeCommonsServiceImpl implements CreativeCommonsService, Initi } } - @Override - public boolean hasLicense(Context context, Item item) - throws SQLException, IOException { - // try to find CC license bundle - List bundles = itemService.getBundles(item, CC_BUNDLE_NAME); - - if (bundles.size() == 0) { - return false; - } - - // verify it has correct contents - try { - if ((getLicenseURL(context, item) == null)) { - return false; - } - } catch (AuthorizeException ae) { - return false; - } - - return true; - } - - @Override - public String getLicenseRDF(Context context, Item item) throws SQLException, - IOException, AuthorizeException { - return getStringFromBitstream(context, item, BSN_LICENSE_RDF); - } - @Override public Bitstream getLicenseRdfBitstream(Item item) throws SQLException, - IOException, AuthorizeException { + IOException, AuthorizeException { return getBitstream(item, BSN_LICENSE_RDF); } @Deprecated @Override public Bitstream getLicenseTextBitstream(Item item) throws SQLException, - IOException, AuthorizeException { + IOException, AuthorizeException { return getBitstream(item, BSN_LICENSE_TEXT); } @Override public String getLicenseURL(Context context, Item item) throws SQLException, IOException, AuthorizeException { - String licenseUri = getCCField("uri").ccItemValue(item); + String licenseUri = getCCField("uri"); if (StringUtils.isNotBlank(licenseUri)) { - return licenseUri; + return getLicenseURI(item); } // JSPUI backward compatibility see https://jira.duraspace.org/browse/DS-2604 return getStringFromBitstream(context, item, BSN_LICENSE_URL); } + /** + * Returns the stored license uri of the item + * + * @param item - The item for which to retrieve the stored license uri + * @return the stored license uri of the item + */ + @Override + public String getLicenseURI(Item item) { + String licenseUriField = getCCField("uri"); + if (StringUtils.isNotBlank(licenseUriField)) { + String metadata = itemService.getMetadata(item, licenseUriField); + if (StringUtils.isNotBlank(metadata)) { + return metadata; + } + } + return null; + } + + /** + * Returns the stored license name of the item + * + * @param item - The item for which to retrieve the stored license name + * @return the stored license name of the item + */ + @Override + public String getLicenseName( Item item) { + String licenseNameField = getCCField("name"); + if (StringUtils.isNotBlank(licenseNameField)) { + String metadata = itemService.getMetadata(item, licenseNameField); + if (StringUtils.isNotBlank(metadata)) { + return metadata; + } + } + return null; + } + @Override public String fetchLicenseRDF(Document license) { StringWriter result = new StringWriter(); try { templates.newTransformer().transform( - new JDOMSource(license), - new StreamResult(result) + new 
JDOMSource(license), + new StreamResult(result) ); } catch (TransformerException e) { throw new IllegalStateException(e.getMessage(), e); @@ -267,7 +295,7 @@ public class CreativeCommonsServiceImpl implements CreativeCommonsService, Initi */ protected void setBitstreamFromBytes(Context context, Item item, Bundle bundle, String bitstream_name, BitstreamFormat format, byte[] bytes) - throws SQLException, IOException, AuthorizeException { + throws SQLException, IOException, AuthorizeException { ByteArrayInputStream bais = new ByteArrayInputStream(bytes); Bitstream bs = bitstreamService.create(context, bundle, bais); @@ -297,7 +325,7 @@ public class CreativeCommonsServiceImpl implements CreativeCommonsService, Initi */ protected String getStringFromBitstream(Context context, Item item, String bitstream_name) throws SQLException, IOException, - AuthorizeException { + AuthorizeException { byte[] bytes = getBytesFromBitstream(context, item, bitstream_name); if (bytes == null) { @@ -320,7 +348,7 @@ public class CreativeCommonsServiceImpl implements CreativeCommonsService, Initi * to perform a particular action. */ protected Bitstream getBitstream(Item item, String bitstream_name) - throws SQLException, IOException, AuthorizeException { + throws SQLException, IOException, AuthorizeException { Bundle cc_bundle = null; // look for the CC bundle @@ -342,7 +370,7 @@ public class CreativeCommonsServiceImpl implements CreativeCommonsService, Initi } protected byte[] getBytesFromBitstream(Context context, Item item, String bitstream_name) - throws SQLException, IOException, AuthorizeException { + throws SQLException, IOException, AuthorizeException { Bitstream bs = getBitstream(item, bitstream_name); // no such bitstream @@ -361,26 +389,322 @@ public class CreativeCommonsServiceImpl implements CreativeCommonsService, Initi * Returns a metadata field handle for given field Id */ @Override - public LicenseMetadataValue getCCField(String fieldId) { - return new LicenseMetadataValue(configurationService.getProperty("cc.license." + fieldId)); + public String getCCField(String fieldId) { + return configurationService.getProperty("cc.license." + fieldId); } + /** + * Remove license information, delete also the bitstream + * + * @param context - DSpace Context + * @param item - the item + * @throws AuthorizeException Exception indicating the current user of the context does not have permission + * to perform a particular action. + * @throws IOException A general class of exceptions produced by failed or interrupted I/O operations. + * @throws SQLException An exception that provides information on a database access error or other errors. 
+ */ @Override - public void removeLicense(Context context, LicenseMetadataValue uriField, - LicenseMetadataValue nameField, Item item) - throws AuthorizeException, IOException, SQLException { + public void removeLicense(Context context, Item item) + throws AuthorizeException, IOException, SQLException { + + String uriField = getCCField("uri"); + String nameField = getCCField("name"); + + String licenseUri = itemService.getMetadata(item, uriField); + // only remove any previous licenses - String licenseUri = uriField.ccItemValue(item); if (licenseUri != null) { - uriField.removeItemValue(context, item, licenseUri); + removeLicenseField(context, item, uriField); if (configurationService.getBooleanProperty("cc.submit.setname")) { - String licenseName = nameField.keyedItemValue(item, licenseUri); - nameField.removeItemValue(context, item, licenseName); + removeLicenseField(context, item, nameField); } if (configurationService.getBooleanProperty("cc.submit.addbitstream")) { - removeLicense(context, item); + removeLicenseFile(context, item); } } } + private void removeLicenseField(Context context, Item item, String field) throws SQLException { + String[] params = splitField(field); + itemService.clearMetadata(context, item, params[0], params[1], params[2], params[3]); + + } + + private void addLicenseField(Context context, Item item, String field, String value) throws SQLException { + String[] params = splitField(field); + itemService.addMetadata(context, item, params[0], params[1], params[2], params[3], value); + + } + + /** + * Find all CC Licenses using the default language found in the configuration + * + * @return A list of available CC Licenses + */ + @Override + public List findAllCCLicenses() { + return findAllCCLicenses(defaultLanguage); + } + + /** + * Find all CC Licenses for the provided language + * + * @param language - the language for which to find the CC Licenses + * @return A list of available CC Licenses for the provided language + */ + @Override + public List findAllCCLicenses(String language) { + + if (!ccLicenses.containsKey(language)) { + initLicenses(language); + } + return new LinkedList<>(ccLicenses.get(language).values()); + } + + /** + * Find the CC License corresponding to the provided ID using the default language found in the configuration + * + * @param id - the ID of the license to be found + * @return the corresponding license if found or null when not found + */ + @Override + public CCLicense findOne(String id) { + return findOne(id, defaultLanguage); + } + + /** + * Find the CC License corresponding to the provided ID and provided language + * + * @param id - the ID of the license to be found + * @param language - the language for which to find the CC License + * @return the corresponding license if found or null when not found + */ + @Override + public CCLicense findOne(String id, String language) { + if (!ccLicenses.containsKey(language)) { + initLicenses(language); + } + Map licenseMap = ccLicenses.get(language); + if (licenseMap.containsKey(id)) { + return licenseMap.get(id); + } + return null; + } + + /** + * Retrieves the licenses for a specific language and cache them in this service + * + * @param language - the language for which to find the CC Licenses + */ + private void initLicenses(final String language) { + Map licenseMap = ccLicenseConnectorService.retrieveLicenses(language); + ccLicenses.put(language, licenseMap); + } + + /** + * Retrieve the CC License URI for the provided license ID, based on the provided answers, using the default + * 
language found in the configuration + * + * @param licenseId - the ID of the license + * @param answerMap - the answers to the different field questions + * @return the corresponding license URI + */ + @Override + public String retrieveLicenseUri(String licenseId, Map answerMap) { + return retrieveLicenseUri(licenseId, defaultLanguage, answerMap); + + } + + /** + * Retrieve the CC License URI for the provided license ID and language based on the provided answers + * + * @param licenseId - the ID of the license + * @param language - the language for which to find the CC License URI + * @param answerMap - the answers to the different field questions + * @return the corresponding license URI + */ + @Override + public String retrieveLicenseUri(String licenseId, String language, Map answerMap) { + return ccLicenseConnectorService.retrieveRightsByQuestion(licenseId, language, answerMap); + + } + + /** + * Verify whether the answer map contains a valid response to all field questions and no answers that don't have a + * corresponding question in the license, using the default language found in the config to check the license + * + * @param licenseId - the ID of the license + * @param fullAnswerMap - the answers to the different field questions + * @return whether the information is valid + */ + @Override + public boolean verifyLicenseInformation(String licenseId, Map fullAnswerMap) { + return verifyLicenseInformation(licenseId, defaultLanguage, fullAnswerMap); + } + + /** + * Verify whether the answer map contains a valid response to all field questions and no answers that don't have a + * corresponding question in the license, using the provided language to check the license + * + * @param licenseId - the ID of the license + * @param language - the language for which to retrieve the full answerMap + * @param fullAnswerMap - the answers to the different field questions + * @return whether the information is valid + */ + @Override + public boolean verifyLicenseInformation(String licenseId, String language, Map fullAnswerMap) { + CCLicense ccLicense = findOne(licenseId, language); + + List ccLicenseFieldList = ccLicense.getCcLicenseFieldList(); + + for (String field : fullAnswerMap.keySet()) { + CCLicenseField ccLicenseField = findCCLicenseField(field, ccLicenseFieldList); + if (ccLicenseField == null) { + return false; + } + if (!containsAnswerEnum(fullAnswerMap.get(field), ccLicenseField)) { + return false; + } + } + return true; + } + + /** + * Retrieve the full answer map containing empty values when an answer for a field was not provided in the + * answerMap, using the default language found in the configuration + * + * @param licenseId - the ID of the license + * @param answerMap - the answers to the different field questions + * @return the answerMap supplemented with all other license fields with a blank answer + */ + @Override + public Map retrieveFullAnswerMap(String licenseId, Map answerMap) { + return retrieveFullAnswerMap(licenseId, defaultLanguage, answerMap); + } + + /** + * Retrieve the full answer map for a provided language, containing empty values when an answer for a field was not + * provided in the answerMap. 
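Taken together, retrieveFullAnswerMap, verifyLicenseInformation and retrieveLicenseUri are meant to be chained when a submitter picks a license. A purely illustrative caller-side sketch; the license id "standard" and the answer keys/values are invented, and the service instance would normally be injected or obtained from LicenseServiceFactory.

    import java.util.HashMap;
    import java.util.Map;

    import org.dspace.license.service.CreativeCommonsService;

    public class AnswerMapFlowSketch {
        String resolveLicenseUri(CreativeCommonsService ccService) {
            Map<String, String> answers = new HashMap<>();
            answers.put("commercial", "y");        // invented example answers
            answers.put("derivatives", "sa");

            // Blank out unanswered fields and apply the configured jurisdiction.
            Map<String, String> fullAnswers = ccService.retrieveFullAnswerMap("standard", answers);
            if (fullAnswers == null || !ccService.verifyLicenseInformation("standard", fullAnswers)) {
                return null;                       // unknown license id or an invalid answer
            }
            return ccService.retrieveLicenseUri("standard", fullAnswers);
        }
    }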
+ * + * @param licenseId - the ID of the license + * @param language - the language for which to retrieve the full answerMap + * @param answerMap - the answers to the different field questions + * @return the answerMap supplemented with all other license fields with a blank answer for the provided language + */ + @Override + public Map retrieveFullAnswerMap(String licenseId, String language, Map answerMap) { + CCLicense ccLicense = findOne(licenseId, language); + if (ccLicense == null) { + return null; + } + Map fullParamMap = new HashMap<>(answerMap); + List ccLicenseFieldList = ccLicense.getCcLicenseFieldList(); + for (CCLicenseField ccLicenseField : ccLicenseFieldList) { + if (!fullParamMap.containsKey(ccLicenseField.getId())) { + fullParamMap.put(ccLicenseField.getId(), ""); + } + } + + updateJurisdiction(fullParamMap); + + return fullParamMap; + } + + private void updateJurisdiction(final Map fullParamMap) { + if (fullParamMap.containsKey(JURISDICTION_KEY)) { + fullParamMap.put(JURISDICTION_KEY, jurisdiction); + } + } + + private boolean containsAnswerEnum(final String enumAnswer, final CCLicenseField ccLicenseField) { + List fieldEnums = ccLicenseField.getFieldEnum(); + for (CCLicenseFieldEnum fieldEnum : fieldEnums) { + if (StringUtils.equals(fieldEnum.getId(), enumAnswer)) { + return true; + } + } + return false; + } + + private CCLicenseField findCCLicenseField(final String field, final List ccLicenseFieldList) { + for (CCLicenseField ccLicenseField : ccLicenseFieldList) { + if (StringUtils.equals(ccLicenseField.getId(), field)) { + return ccLicenseField; + } + } + + return null; + } + + /** + * Update the license of the item with a new one based on the provided license URI + * + * @param context - The relevant DSpace context + * @param licenseUri - The license URI to be used in the update + * @param item - The item for which to update the license + * @return true when the update was successful, false when not + * @throws AuthorizeException + * @throws SQLException + */ + @Override + public boolean updateLicense(final Context context, final String licenseUri, final Item item) + throws AuthorizeException, SQLException { + try { + Document doc = ccLicenseConnectorService.retrieveLicenseRDFDoc(licenseUri); + if (doc == null) { + return false; + } + String licenseName = ccLicenseConnectorService.retrieveLicenseName(doc); + if (StringUtils.isBlank(licenseName)) { + return false; + } + + removeLicense(context, item); + addLicense(context, item, licenseUri, licenseName, doc); + + return true; + + } catch (IOException e) { + log.error("Error while updating the license of item: " + item.getID(), e); + } + return false; + } + + /** + * Add a new license to the item + * + * @param context - The relevant Dspace context + * @param item - The item to which the license will be added + * @param licenseUri - The license URI to add + * @param licenseName - The license name to add + * @param doc - The license to document to add + * @throws SQLException + * @throws IOException + * @throws AuthorizeException + */ + @Override + public void addLicense(Context context, Item item, String licenseUri, String licenseName, Document doc) + throws SQLException, IOException, AuthorizeException { + String uriField = getCCField("uri"); + String nameField = getCCField("name"); + + addLicenseField(context, item, uriField, licenseUri); + if (configurationService.getBooleanProperty("cc.submit.addbitstream")) { + setLicenseRDF(context, item, fetchLicenseRDF(doc)); + } + if 
(configurationService.getBooleanProperty("cc.submit.setname")) { + addLicenseField(context, item, nameField, licenseName); + } + } + + private String[] splitField(String fieldName) { + String[] params = new String[4]; + String[] fParams = fieldName.split("\\."); + for (int i = 0; i < fParams.length; i++) { + params[i] = fParams[i]; + } + params[3] = Item.ANY; + return params; + } + } diff --git a/dspace-api/src/main/java/org/dspace/license/LicenseMetadataValue.java b/dspace-api/src/main/java/org/dspace/license/LicenseMetadataValue.java deleted file mode 100644 index ec5c9e447b..0000000000 --- a/dspace-api/src/main/java/org/dspace/license/LicenseMetadataValue.java +++ /dev/null @@ -1,129 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.license; - -import java.io.IOException; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; - -import org.dspace.authorize.AuthorizeException; -import org.dspace.content.Item; -import org.dspace.content.MetadataValue; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.ItemService; -import org.dspace.core.Context; - -/** - * Helper class for using CC-related Metadata fields - * - * @author kevinvandevelde at atmire.com - */ -public class LicenseMetadataValue { - - protected final ItemService itemService; - // Shibboleth for Creative Commons license data - i.e. characters that reliably indicate CC in a URI - protected static final String ccShib = "creativecommons"; - - private String[] params = new String[4]; - - public LicenseMetadataValue(String fieldName) { - if (fieldName != null && fieldName.length() > 0) { - String[] fParams = fieldName.split("\\."); - for (int i = 0; i < fParams.length; i++) { - params[i] = fParams[i]; - } - params[3] = Item.ANY; - } - itemService = ContentServiceFactory.getInstance().getItemService(); - } - - /** - * Returns first value that matches Creative Commons 'shibboleth', - * or null if no matching values. - * NB: this method will succeed only for metadata fields holding CC URIs - * - * @param item - the item to read - * @return value - the first CC-matched value, or null if no such value - */ - public String ccItemValue(Item item) { - List dcvalues = itemService.getMetadata(item, params[0], params[1], params[2], params[3]); - for (MetadataValue dcvalue : dcvalues) { - if ((dcvalue.getValue()).indexOf(ccShib) != -1) { - // return first value that matches the shib - return dcvalue.getValue(); - } - } - return null; - } - - /** - * Returns the value that matches the value mapped to the passed key if any. - * NB: this only delivers a license name (if present in field) given a license URI - * - * @param item - the item to read - * @param key - the key for desired value - * @return value - the value associated with key or null if no such value - * @throws IOException A general class of exceptions produced by failed or interrupted I/O operations. - * @throws SQLException An exception that provides information on a database access error or other errors. - * @throws AuthorizeException Exception indicating the current user of the context does not have permission - * to perform a particular action. 
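The splitField(...) helper added above is all that remains of LicenseMetadataValue's field-name handling: it turns the configured dot-notation field name into the four arguments that ItemService#clearMetadata / #addMetadata expect, with the language slot fixed to Item.ANY. A standalone illustration of that parsing; dc.rights.uri is only an assumed example of what cc.license.uri might be configured to.

    import org.dspace.content.Item;

    public class FieldNameSketch {
        // Same parsing as CreativeCommonsServiceImpl#splitField above.
        static String[] split(String fieldName) {
            String[] params = new String[4];
            String[] parts = fieldName.split("\\.");
            for (int i = 0; i < parts.length; i++) {
                params[i] = parts[i];
            }
            params[3] = Item.ANY;
            return params;
        }

        public static void main(String[] args) {
            String[] p = split("dc.rights.uri");
            // schema=dc, element=rights, qualifier=uri, language=Item.ANY
            System.out.println(p[0] + " . " + p[1] + " . " + p[2] + " / " + p[3]);
        }
    }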
- */ - public String keyedItemValue(Item item, String key) - throws AuthorizeException, IOException, SQLException { - CCLookup ccLookup = new CCLookup(); - ccLookup.issue(key); - String matchValue = ccLookup.getLicenseName(); - List dcvalues = itemService.getMetadata(item, params[0], params[1], params[2], params[3]); - for (MetadataValue dcvalue : dcvalues) { - if (dcvalue.getValue().equals(matchValue)) { - return dcvalue.getValue(); - } - } - return null; - } - - /** - * Removes the passed value from the set of values for the field in passed item. - * - * @param context The relevant DSpace Context. - * @param item - the item to update - * @param value - the value to remove - * @throws IOException A general class of exceptions produced by failed or interrupted I/O operations. - * @throws SQLException An exception that provides information on a database access error or other errors. - * @throws AuthorizeException Exception indicating the current user of the context does not have permission - * to perform a particular action. - */ - public void removeItemValue(Context context, Item item, String value) - throws AuthorizeException, IOException, SQLException { - if (value != null) { - List dcvalues = itemService.getMetadata(item, params[0], params[1], params[2], params[3]); - ArrayList arrayList = new ArrayList(); - for (MetadataValue dcvalue : dcvalues) { - if (!dcvalue.getValue().equals(value)) { - arrayList.add(dcvalue.getValue()); - } - } - itemService.clearMetadata(context, item, params[0], params[1], params[2], params[3]); - itemService.addMetadata(context, item, params[0], params[1], params[2], params[3], arrayList); - } - } - - /** - * Adds passed value to the set of values for the field in passed item. - * - * @param context The relevant DSpace Context. - * @param item - the item to update - * @param value - the value to add in this field - * @throws SQLException An exception that provides information on a database access error or other errors. - */ - public void addItemValue(Context context, Item item, String value) throws SQLException { - itemService.addMetadata(context, item, params[0], params[1], params[2], params[3], value); - } - -} diff --git a/dspace-api/src/main/java/org/dspace/license/service/CreativeCommonsService.java b/dspace-api/src/main/java/org/dspace/license/service/CreativeCommonsService.java index c99c38a127..fa32cb75ca 100644 --- a/dspace-api/src/main/java/org/dspace/license/service/CreativeCommonsService.java +++ b/dspace-api/src/main/java/org/dspace/license/service/CreativeCommonsService.java @@ -10,12 +10,14 @@ package org.dspace.license.service; import java.io.IOException; import java.io.InputStream; import java.sql.SQLException; +import java.util.List; +import java.util.Map; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Bitstream; import org.dspace.content.Item; import org.dspace.core.Context; -import org.dspace.license.LicenseMetadataValue; +import org.dspace.license.CCLicense; import org.jdom.Document; /** @@ -29,13 +31,6 @@ public interface CreativeCommonsService { public static final String CC_BUNDLE_NAME = "CC-LICENSE"; - /** - * Simple accessor for enabling of CC - * - * @return is CC enabled? - */ - public boolean isEnabled(); - /** * setLicenseRDF * @@ -50,7 +45,7 @@ public interface CreativeCommonsService { * to perform a particular action. 
*/ public void setLicenseRDF(Context context, Item item, String licenseRdf) - throws SQLException, IOException, AuthorizeException; + throws SQLException, IOException, AuthorizeException; /** @@ -72,19 +67,40 @@ public interface CreativeCommonsService { */ public void setLicense(Context context, Item item, InputStream licenseStm, String mimeType) - throws SQLException, IOException, AuthorizeException; + throws SQLException, IOException, AuthorizeException; - public void removeLicense(Context context, Item item) - throws SQLException, IOException, AuthorizeException; + /** + * Removes the license file from the item + * + * @param context - The relevant DSpace Context + * @param item - The item from which the license file needs to be removed + * @throws SQLException + * @throws IOException + * @throws AuthorizeException + */ + public void removeLicenseFile(Context context, Item item) + throws SQLException, IOException, AuthorizeException; - public boolean hasLicense(Context context, Item item) - throws SQLException, IOException; public String getLicenseURL(Context context, Item item) - throws SQLException, IOException, AuthorizeException; + throws SQLException, IOException, AuthorizeException; - public String getLicenseRDF(Context context, Item item) - throws SQLException, IOException, AuthorizeException; + + /** + * Returns the stored license uri of the item + * + * @param item - The item for which to retrieve the stored license uri + * @return the stored license uri of the item + */ + public String getLicenseURI(Item item); + + /** + * Returns the stored license name of the item + * + * @param item - The item for which to retrieve the stored license name + * @return the stored license name of the item + */ + public String getLicenseName(Item item); /** * Get Creative Commons license RDF, returning Bitstream object. @@ -97,7 +113,7 @@ public interface CreativeCommonsService { * to perform a particular action. */ public Bitstream getLicenseRdfBitstream(Item item) - throws SQLException, IOException, AuthorizeException; + throws SQLException, IOException, AuthorizeException; /** * Get Creative Commons license Text, returning Bitstream object. @@ -112,7 +128,7 @@ public interface CreativeCommonsService { * is no longer stored (see https://jira.duraspace.org/browse/DS-2604) */ public Bitstream getLicenseTextBitstream(Item item) - throws SQLException, IOException, AuthorizeException; + throws SQLException, IOException, AuthorizeException; /** * Get a few license-specific properties. We expect these to be cached at @@ -121,7 +137,7 @@ public interface CreativeCommonsService { * @param fieldId name of the property. * @return its value. */ - public LicenseMetadataValue getCCField(String fieldId); + public String getCCField(String fieldId); /** * Apply same transformation on the document to retrieve only the most @@ -138,15 +154,134 @@ public interface CreativeCommonsService { * Remove license information, delete also the bitstream * * @param context - DSpace Context - * @param uriField - the metadata field for license uri - * @param nameField - the metadata field for license name * @param item - the item * @throws AuthorizeException Exception indicating the current user of the context does not have permission * to perform a particular action. * @throws IOException A general class of exceptions produced by failed or interrupted I/O operations. * @throws SQLException An exception that provides information on a database access error or other errors. 
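For callers, the practical effect of dropping LicenseMetadataValue is easiest to see side by side. A hedged before/after sketch of reading and removing an item's CC license through this interface; the new calls are the ones implemented in CreativeCommonsServiceImpl earlier in this diff and declared in this interface.

    import java.io.IOException;
    import java.sql.SQLException;

    import org.dspace.authorize.AuthorizeException;
    import org.dspace.content.Item;
    import org.dspace.core.Context;
    import org.dspace.license.service.CreativeCommonsService;

    public class LicenseMigrationSketch {
        void readAndRemove(CreativeCommonsService ccService, Context context, Item item)
                throws AuthorizeException, IOException, SQLException {
            // Before this patch (removed API):
            //   LicenseMetadataValue uriField  = ccService.getCCField("uri");
            //   LicenseMetadataValue nameField = ccService.getCCField("name");
            //   String uri = uriField.ccItemValue(item);
            //   ccService.removeLicense(context, uriField, nameField, item);

            // After this patch: the service resolves the configured metadata fields itself.
            String uri = ccService.getLicenseURI(item);
            String name = ccService.getLicenseName(item);
            System.out.println(name + " -> " + uri);
            ccService.removeLicense(context, item);
        }
    }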
*/ - public void removeLicense(Context context, LicenseMetadataValue uriField, - LicenseMetadataValue nameField, Item item) - throws AuthorizeException, IOException, SQLException; + public void removeLicense(Context context, Item item) + throws AuthorizeException, IOException, SQLException; + + /** + * Find all CC Licenses using the default language found in the configuration + * + * @return A list of available CC Licenses + */ + public List findAllCCLicenses(); + + /** + * Find all CC Licenses for the provided language + * + * @param language - the language for which to find the CC Licenses + * @return A list of available CC Licenses for the provided language + */ + public List findAllCCLicenses(String language); + + /** + * Find the CC License corresponding to the provided ID using the default language found in the configuration + * + * @param id - the ID of the license to be found + * @return the corresponding license if found or null when not found + */ + public CCLicense findOne(String id); + + /** + * Find the CC License corresponding to the provided ID and provided language + * + * @param id - the ID of the license to be found + * @param language - the language for which to find the CC License + * @return the corresponding license if found or null when not found + */ + public CCLicense findOne(String id, String language); + + /** + * Retrieve the CC License URI for the provided license ID, based on the provided answers, using the default + * language found in the configuration + * + * @param licenseId - the ID of the license + * @param answerMap - the answers to the different field questions + * @return the corresponding license URI + */ + public String retrieveLicenseUri(String licenseId, Map answerMap); + + /** + * Retrieve the CC License URI for the provided license ID and language based on the provided answers + * + * @param licenseId - the ID of the license + * @param language - the language for which to find the CC License URI + * @param answerMap - the answers to the different field questions + * @return the corresponding license URI + */ + public String retrieveLicenseUri(String licenseId, String language, Map answerMap); + + /** + * Retrieve the full answer map containing empty values when an answer for a field was not provided in the + * answerMap, using the default language found in the configuration + * + * @param licenseId - the ID of the license + * @param answerMap - the answers to the different field questions + * @return the answerMap supplemented with all other license fields with a blank answer + */ + public Map retrieveFullAnswerMap(String licenseId, Map answerMap); + + /** + * Retrieve the full answer map for a provided language, containing empty values when an answer for a field was not + * provided in the answerMap. 
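The discovery side of the new interface is the findAllCCLicenses / findOne pair just declared, each with an overload taking an explicit locale; the implementation shown earlier caches results per language. A short illustrative sketch; the "nl" locale and the "standard" license id are invented example inputs.

    import java.util.List;

    import org.dspace.license.CCLicense;
    import org.dspace.license.service.CreativeCommonsService;

    public class LicenseLookupSketch {
        void listAndLookUp(CreativeCommonsService ccService) {
            // The no-argument variants fall back to the cc.license.locale configuration.
            List<CCLicense> defaults = ccService.findAllCCLicenses();
            List<CCLicense> dutch = ccService.findAllCCLicenses("nl");

            // findOne returns null when the id is unknown for that language.
            CCLicense license = ccService.findOne("standard", "nl");
            System.out.println(defaults.size() + " licenses (default locale), " + dutch.size()
                    + " (nl); found 'standard': " + (license != null));
        }
    }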
+ * + * @param licenseId - the ID of the license + * @param language - the language for which to retrieve the full answerMap + * @param answerMap - the answers to the different field questions + * @return the answerMap supplemented with all other license fields with a blank answer for the provided language + */ + public Map retrieveFullAnswerMap(String licenseId, String language, Map answerMap); + + /** + * Verify whether the answer map contains a valid response to all field questions and no answers that don't have a + * corresponding question in the license, using the default language found in the config to check the license + * + * @param licenseId - the ID of the license + * @param fullAnswerMap - the answers to the different field questions + * @return whether the information is valid + */ + public boolean verifyLicenseInformation(String licenseId, Map fullAnswerMap); + + /** + * Verify whether the answer map contains a valid response to all field questions and no answers that don't have a + * corresponding question in the license, using the provided language to check the license + * + * @param licenseId - the ID of the license + * @param language - the language for which to retrieve the full answerMap + * @param fullAnswerMap - the answers to the different field questions + * @return whether the information is valid + */ + public boolean verifyLicenseInformation(String licenseId, String language, Map fullAnswerMap); + + /** + * Update the license of the item with a new one based on the provided license URI + * + * @param context - The relevant DSpace context + * @param licenseUri - The license URI to be used in the update + * @param item - The item for which to update the license + * @return true when the update was successful, false when not + * @throws AuthorizeException + * @throws SQLException + */ + public boolean updateLicense(final Context context, String licenseUri, final Item item) + throws AuthorizeException, SQLException; + + /** + * Add a new license to the item + * + * @param context - The relevant Dspace context + * @param item - The item to which the license will be added + * @param licenseUri - The license URI to add + * @param licenseName - The license name to add + * @param doc - The license to document to add + * @throws SQLException + * @throws IOException + * @throws AuthorizeException + */ + public void addLicense(Context context, Item item, String licenseUri, String licenseName, Document doc) + throws SQLException, IOException, AuthorizeException; } diff --git a/dspace-api/src/main/java/org/dspace/rdf/negotiation/Negotiator.java b/dspace-api/src/main/java/org/dspace/rdf/negotiation/Negotiator.java index c28b9ec1e6..d011d305b1 100644 --- a/dspace-api/src/main/java/org/dspace/rdf/negotiation/Negotiator.java +++ b/dspace-api/src/main/java/org/dspace/rdf/negotiation/Negotiator.java @@ -15,6 +15,7 @@ import java.util.Iterator; import javax.servlet.http.HttpServletResponse; import org.apache.commons.lang3.StringUtils; +import org.apache.commons.validator.routines.UrlValidator; import org.apache.logging.log4j.Logger; import org.dspace.rdf.RDFUtil; import org.dspace.services.factory.DSpaceServicesFactory; @@ -197,6 +198,7 @@ public class Negotiator { if (extraPathInfo == null) { extraPathInfo = ""; } + UrlValidator urlValidator = new UrlValidator(UrlValidator.ALLOW_LOCAL_URLS); StringBuilder urlBuilder = new StringBuilder(); String lang = null; @@ -256,12 +258,15 @@ public class Negotiator { urlBuilder.append(handle).append("/").append(extraPathInfo); } String url = 
urlBuilder.toString(); - - log.debug("Will forward to '" + url + "'."); - response.setStatus(HttpServletResponse.SC_SEE_OTHER); - response.setHeader("Location", url); - response.flushBuffer(); - return true; + if (urlValidator.isValid(url)) { + log.debug("Will forward to '" + url + "'."); + response.setStatus(HttpServletResponse.SC_SEE_OTHER); + response.setHeader("Location", url); + response.flushBuffer(); + return true; + } else { + throw new IOException("Invalid URL '" + url + "', cannot redirect."); + } } // currently we cannot serve statistics as rdf @@ -287,10 +292,14 @@ public class Negotiator { urlBuilder.append("/handle/").append(handle); urlBuilder.append("/").append(lang); String url = urlBuilder.toString(); - log.debug("Will forward to '" + url + "'."); - response.setStatus(HttpServletResponse.SC_SEE_OTHER); - response.setHeader("Location", url); - response.flushBuffer(); - return true; + if (urlValidator.isValid(url)) { + log.debug("Will forward to '" + url + "'."); + response.setStatus(HttpServletResponse.SC_SEE_OTHER); + response.setHeader("Location", url); + response.flushBuffer(); + return true; + } else { + throw new IOException("Invalid URL '" + url + "', cannot redirect."); + } } } diff --git a/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java b/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java index 4ce1c5063a..d0fffdb57d 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java +++ b/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java @@ -7,70 +7,72 @@ */ package org.dspace.scripts; -import java.sql.SQLException; +import java.io.InputStream; +import java.util.LinkedList; +import java.util.List; +import java.util.UUID; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.DefaultParser; +import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; +import org.apache.commons.lang3.StringUtils; +import org.dspace.eperson.EPerson; +import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.handler.DSpaceRunnableHandler; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Required; /** - * This abstract class is the class that should be extended by each script. - * it provides the basic variables to be hold by the script as well as the means to initialize, parse and run the script - * Every DSpaceRunnable that is implemented in this way should be defined in the scripts.xml config file as a bean + * This is the class that should be extended for each Script. This class will contain the logic needed to run and it'll + * fetch the information that it needs from the {@link ScriptConfiguration} provided through the diamond operators. + * This will be the dspaceRunnableClass for the {@link ScriptConfiguration} beans. 
Specifically created for each + * script + * @param */ -public abstract class DSpaceRunnable implements Runnable { +public abstract class DSpaceRunnable implements Runnable { - /** - * The name of the script - */ - private String name; - /** - * The description of the script - */ - private String description; /** * The CommandLine object for the script that'll hold the information */ protected CommandLine commandLine; + /** - * The possible options for this script + * This EPerson identifier variable is the uuid of the eperson that's running the script */ - protected Options options; + private UUID epersonIdentifier; + /** * The handler that deals with this script. This handler can currently either be a RestDSpaceRunnableHandler or * a CommandlineDSpaceRunnableHandler depending from where the script is called */ protected DSpaceRunnableHandler handler; - @Autowired - private AuthorizeService authorizeService; + /** + * This method will return the Configuration that the implementing DSpaceRunnable uses + * @return The {@link ScriptConfiguration} that this implementing DspaceRunnable uses + */ + public abstract T getScriptConfiguration(); - public String getName() { - return name; + + private void setHandler(DSpaceRunnableHandler dSpaceRunnableHandler) { + this.handler = dSpaceRunnableHandler; } - @Required - public void setName(String name) { - this.name = name; - } - - public String getDescription() { - return description; - } - - @Required - public void setDescription(String description) { - this.description = description; - } - - public Options getOptions() { - return options; + /** + * This method sets the appropriate DSpaceRunnableHandler depending on where it was ran from and it parses + * the arguments given to the script + * @param args The arguments given to the script + * @param dSpaceRunnableHandler The DSpaceRunnableHandler object that defines from where the script was ran + * @param currentUser + * @throws ParseException If something goes wrong + */ + public void initialize(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler, + EPerson currentUser) throws ParseException { + if (currentUser != null) { + this.setEpersonIdentifier(currentUser.getID()); + } + this.setHandler(dSpaceRunnableHandler); + this.parse(args); } /** @@ -80,18 +82,16 @@ public abstract class DSpaceRunnable implements Runnable { * @throws ParseException If something goes wrong */ private void parse(String[] args) throws ParseException { - commandLine = new DefaultParser().parse(getOptions(), args); + commandLine = new DefaultParser().parse(getScriptConfiguration().getOptions(), args); setup(); } /** - * This method will call upon the {@link DSpaceRunnableHandler#printHelp(Options, String)} method with the script's - * options and name + * This method has to be included in every script and handles the setup of the script by parsing the CommandLine + * and setting the variables + * @throws ParseException If something goes wrong */ - public void printHelp() { - handler.printHelp(options, name); - } - + public abstract void setup() throws ParseException; /** * This is the run() method from the Runnable interface that we implement. 
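With the script's name, description and Options now owned by its ScriptConfiguration, a concrete script is reduced to getScriptConfiguration(), setup() and internalRun(). A minimal, hypothetical sketch of such a script follows; the HelloWorldScriptConfiguration class, the "hello-world" bean name, the service-manager lookup and the handler.logInfo call are illustrative assumptions rather than things defined in this patch, while the protected commandLine and handler fields and the abstract methods are the ones shown above.

    import org.apache.commons.cli.ParseException;
    import org.dspace.scripts.DSpaceRunnable;
    import org.dspace.utils.DSpace;

    // HelloWorldScriptConfiguration is a hypothetical ScriptConfiguration subclass that would be
    // declared as a Spring bean and expose the script's name and Options.
    public class HelloWorldScript extends DSpaceRunnable<HelloWorldScriptConfiguration> {

        private String target;

        @Override
        public HelloWorldScriptConfiguration getScriptConfiguration() {
            // One common way to resolve the configuration bean (lookup path assumed, not from this patch).
            return new DSpace().getServiceManager()
                               .getServiceByName("hello-world", HelloWorldScriptConfiguration.class);
        }

        @Override
        public void setup() throws ParseException {
            // commandLine was parsed against getScriptConfiguration().getOptions() in initialize()/parse().
            target = commandLine.getOptionValue('t', "world");
        }

        @Override
        public void internalRun() throws Exception {
            // The handler abstracts over command-line and REST execution of the script.
            handler.logInfo("Hello, " + target);
        }
    }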
This method will handle the running @@ -108,22 +108,6 @@ public abstract class DSpaceRunnable implements Runnable { } } - private void setHandler(DSpaceRunnableHandler dSpaceRunnableHandler) { - this.handler = dSpaceRunnableHandler; - } - - /** - * This method sets the appropriate DSpaceRunnableHandler depending on where it was ran from and it parses - * the arguments given to the script - * @param args The arguments given to the script - * @param dSpaceRunnableHandler The DSpaceRunnableHandler object that defines from where the script was ran - * @throws ParseException If something goes wrong - */ - public void initialize(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler) throws ParseException { - this.setHandler(dSpaceRunnableHandler); - this.parse(args); - } - /** * This method has to be included in every script and this will be the main execution block for the script that'll * contain all the logic needed @@ -132,25 +116,46 @@ public abstract class DSpaceRunnable implements Runnable { public abstract void internalRun() throws Exception; /** - * This method has to be included in every script and handles the setup of the script by parsing the CommandLine - * and setting the variables - * @throws ParseException If something goes wrong + * This method will call upon the {@link DSpaceRunnableHandler#printHelp(Options, String)} method with the script's + * options and name */ - public abstract void setup() throws ParseException; + public void printHelp() { + handler.printHelp(getScriptConfiguration().getOptions(), getScriptConfiguration().getName()); + } /** - * This method will return if the script is allowed to execute in the given context. This is by default set - * to the currentUser in the context being an admin, however this can be overwritten by each script individually - * if different rules apply - * @param context The relevant DSpace context - * @return A boolean indicating whether the script is allowed to execute or not + * This method will traverse all the options and it'll grab options defined as an InputStream type to then save + * the filename specified by that option in a list of Strings that'll be returned in the end + * @return The list of Strings representing filenames from the options given to the script */ - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - handler.logError("Error occured when trying to verify permissions for script: " + name); + public List getFileNamesFromInputStreamOptions() { + List fileNames = new LinkedList<>(); + + for (Option option : getScriptConfiguration().getOptions().getOptions()) { + if (option.getType() == InputStream.class && + StringUtils.isNotBlank(commandLine.getOptionValue(option.getOpt()))) { + fileNames.add(commandLine.getOptionValue(option.getOpt())); + } } - return false; + + return fileNames; + } + + /** + * Generic getter for the epersonIdentifier + * This EPerson identifier variable is the uuid of the eperson that's running the script + * @return the epersonIdentifier value of this DSpaceRunnable + */ + public UUID getEpersonIdentifier() { + return epersonIdentifier; + } + + /** + * Generic setter for the epersonIdentifier + * This EPerson identifier variable is the uuid of the eperson that's running the script + * @param epersonIdentifier The epersonIdentifier to be set on this DSpaceRunnable + */ + public void setEpersonIdentifier(UUID epersonIdentifier) { + this.epersonIdentifier = epersonIdentifier; } } diff --git 
a/dspace-api/src/main/java/org/dspace/scripts/Process.java b/dspace-api/src/main/java/org/dspace/scripts/Process.java index bc9204d429..574ba59760 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/Process.java +++ b/dspace-api/src/main/java/org/dspace/scripts/Process.java @@ -8,6 +8,7 @@ package org.dspace.scripts; import java.util.Date; +import java.util.LinkedList; import java.util.List; import javax.persistence.Column; import javax.persistence.Entity; @@ -80,6 +81,9 @@ public class Process implements ReloadableEntity { @Temporal(TemporalType.TIMESTAMP) private Date creationTime; + public static final String BITSTREAM_TYPE_METADATAFIELD = "dspace.process.filetype"; + public static final String OUTPUT_TYPE = "script_output"; + protected Process() { } @@ -174,6 +178,9 @@ public class Process implements ReloadableEntity { * @return The Bitstreams that are used or created by the process */ public List getBitstreams() { + if (bitstreams == null) { + bitstreams = new LinkedList<>(); + } return bitstreams; } diff --git a/dspace-api/src/main/java/org/dspace/scripts/ProcessLogLevel.java b/dspace-api/src/main/java/org/dspace/scripts/ProcessLogLevel.java new file mode 100644 index 0000000000..306ea3dde6 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/scripts/ProcessLogLevel.java @@ -0,0 +1,14 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.scripts; + +public enum ProcessLogLevel { + INFO, + WARNING, + ERROR +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/scripts/ProcessQueryParameterContainer.java b/dspace-api/src/main/java/org/dspace/scripts/ProcessQueryParameterContainer.java new file mode 100644 index 0000000000..d571834246 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/scripts/ProcessQueryParameterContainer.java @@ -0,0 +1,78 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.scripts; + +import java.util.HashMap; +import java.util.Map; + +/** + * This is a container class in which the variables can be stored that a {@link Process} must adhere to when being + * retrieved from the DB through the search methods + */ +public class ProcessQueryParameterContainer { + + + private Map queryParameterMap = new HashMap<>(); + + /** + * Generic getter for the queryParameterMap + * @return the queryParameterMap value of this ProcessQueryParameterContainer + */ + public Map getQueryParameterMap() { + return queryParameterMap; + } + + private String sortProperty = "startTime"; + private String sortOrder = "desc"; + /** + * Generic setter for the queryParameterMap + * @param queryParameterMap The queryParameterMap to be set on this ProcessQueryParameterContainer + */ + public void setQueryParameterMap(Map queryParameterMap) { + this.queryParameterMap = queryParameterMap; + } + + public void addToQueryParameterMap(String key, Object object) { + if (queryParameterMap == null) { + queryParameterMap = new HashMap<>(); + } + queryParameterMap.put(key, object); + } + + /** + * Generic getter for the sortProperty + * @return the sortProperty value of this ProcessQueryParameterContainer + */ + public String getSortProperty() { + return sortProperty; + } + + /** + * Generic 
setter for the sortProperty + * @param sortProperty The sortProperty to be set on this ProcessQueryParameterContainer + */ + public void setSortProperty(String sortProperty) { + this.sortProperty = sortProperty; + } + + /** + * Generic getter for the sortOrder + * @return the sortOrder value of this ProcessQueryParameterContainer + */ + public String getSortOrder() { + return sortOrder; + } + + /** + * Generic setter for the sortOrder + * @param sortOrder The sortOrder to be set on this ProcessQueryParameterContainer + */ + public void setSortOrder(String sortOrder) { + this.sortOrder = sortOrder; + } +} diff --git a/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java b/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java index cb5a5c9944..aa193f30bc 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java @@ -7,21 +7,43 @@ */ package org.dspace.scripts; +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.io.InputStream; import java.sql.SQLException; +import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.Date; +import java.util.HashSet; import java.util.List; +import java.util.Set; import java.util.regex.Pattern; +import org.apache.commons.collections4.ListUtils; +import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Bitstream; +import org.dspace.content.Item; +import org.dspace.content.MetadataField; +import org.dspace.content.MetadataValue; import org.dspace.content.ProcessStatus; import org.dspace.content.dao.ProcessDAO; +import org.dspace.content.service.BitstreamFormatService; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.MetadataFieldService; +import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.LogManager; import org.dspace.eperson.EPerson; +import org.dspace.eperson.service.EPersonService; import org.dspace.scripts.service.ProcessService; import org.springframework.beans.factory.annotation.Autowired; @@ -35,6 +57,21 @@ public class ProcessServiceImpl implements ProcessService { @Autowired private ProcessDAO processDAO; + @Autowired + private BitstreamService bitstreamService; + + @Autowired + private BitstreamFormatService bitstreamFormatService; + + @Autowired + private AuthorizeService authorizeService; + + @Autowired + private MetadataFieldService metadataFieldService; + + @Autowired + private EPersonService ePersonService; + @Override public Process create(Context context, EPerson ePerson, String scriptName, List parameters) throws SQLException { @@ -113,11 +150,35 @@ public class ProcessServiceImpl implements ProcessService { } @Override - public void delete(Context context, Process process) throws SQLException { + public void appendFile(Context context, Process process, InputStream is, String type, String fileName) + throws IOException, SQLException, AuthorizeException { + Bitstream bitstream = bitstreamService.create(context, is); + if (getBitstream(context, process, type) != null) { + throw new IllegalArgumentException("Cannot create another file of type: " + type + " for this process" 
+ + " with id: " + process.getID()); + } + bitstream.setName(context, fileName); + bitstreamService.setFormat(context, bitstream, bitstreamFormatService.guessFormat(context, bitstream)); + MetadataField dspaceProcessFileTypeField = metadataFieldService + .findByString(context, Process.BITSTREAM_TYPE_METADATAFIELD, '.'); + bitstreamService.addMetadata(context, bitstream, dspaceProcessFileTypeField, null, type); + authorizeService.addPolicy(context, bitstream, Constants.READ, context.getCurrentUser()); + authorizeService.addPolicy(context, bitstream, Constants.WRITE, context.getCurrentUser()); + authorizeService.addPolicy(context, bitstream, Constants.DELETE, context.getCurrentUser()); + bitstreamService.update(context, bitstream); + process.addBitstream(bitstream); + update(context, process); + } + + @Override + public void delete(Context context, Process process) throws SQLException, IOException, AuthorizeException { + + for (Bitstream bitstream : ListUtils.emptyIfNull(process.getBitstreams())) { + bitstreamService.delete(context, bitstream); + } processDAO.delete(context, process); log.info(LogManager.getHeader(context, "process_delete", "Process with ID " + process.getID() + " and name " + process.getName() + " has been deleted")); - } @Override @@ -141,8 +202,112 @@ public class ProcessServiceImpl implements ProcessService { return parameterList; } + @Override + public Bitstream getBitstreamByName(Context context, Process process, String bitstreamName) { + for (Bitstream bitstream : getBitstreams(context, process)) { + if (StringUtils.equals(bitstream.getName(), bitstreamName)) { + return bitstream; + } + } + + return null; + } + + @Override + public Bitstream getBitstream(Context context, Process process, String type) { + List allBitstreams = process.getBitstreams(); + + if (type == null) { + return null; + } else { + if (allBitstreams != null) { + for (Bitstream bitstream : allBitstreams) { + if (StringUtils.equals(bitstreamService.getMetadata(bitstream, + Process.BITSTREAM_TYPE_METADATAFIELD), type)) { + return bitstream; + } + } + } + } + return null; + } + + @Override + public List getBitstreams(Context context, Process process) { + return process.getBitstreams(); + } + public int countTotal(Context context) throws SQLException { return processDAO.countRows(context); } + @Override + public List getFileTypesForProcessBitstreams(Context context, Process process) { + List list = getBitstreams(context, process); + Set fileTypesSet = new HashSet<>(); + for (Bitstream bitstream : list) { + List metadata = bitstreamService.getMetadata(bitstream, + Process.BITSTREAM_TYPE_METADATAFIELD, Item.ANY); + if (metadata != null && !metadata.isEmpty()) { + fileTypesSet.add(metadata.get(0).getValue()); + } + } + return new ArrayList<>(fileTypesSet); + } + + @Override + public List search(Context context, ProcessQueryParameterContainer processQueryParameterContainer, + int limit, int offset) throws SQLException { + return processDAO.search(context, processQueryParameterContainer, limit, offset); + } + + @Override + public int countSearch(Context context, ProcessQueryParameterContainer processQueryParameterContainer) + throws SQLException { + return processDAO.countTotalWithParameters(context, processQueryParameterContainer); + } + + + @Override + public void appendLog(int processId, String scriptName, String output, ProcessLogLevel processLogLevel) + throws IOException { + File tmpDir = FileUtils.getTempDirectory(); + File tempFile = new File(tmpDir, scriptName + processId + ".log"); + FileWriter out 
= new FileWriter(tempFile, true); + try { + try (BufferedWriter writer = new BufferedWriter(out)) { + writer.append(formatLogLine(processId, scriptName, output, processLogLevel)); + writer.newLine(); + } + } finally { + out.close(); + } + } + + @Override + public void createLogBitstream(Context context, Process process) + throws IOException, SQLException, AuthorizeException { + File tmpDir = FileUtils.getTempDirectory(); + File tempFile = new File(tmpDir, process.getName() + process.getID() + ".log"); + FileInputStream inputStream = FileUtils.openInputStream(tempFile); + appendFile(context, process, inputStream, Process.OUTPUT_TYPE, process.getName() + process.getID() + ".log"); + inputStream.close(); + tempFile.delete(); + } + + private String formatLogLine(int processId, String scriptName, String output, ProcessLogLevel processLogLevel) { + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); + StringBuilder sb = new StringBuilder(); + sb.append(sdf.format(new Date())); + sb.append(" "); + sb.append(processLogLevel); + sb.append(" "); + sb.append(scriptName); + sb.append(" - "); + sb.append(processId); + sb.append(" @ "); + sb.append(output); + return sb.toString(); + } + } diff --git a/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java b/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java index e2a6acf3a8..4eb7cdbbc1 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java @@ -7,33 +7,46 @@ */ package org.dspace.scripts; +import java.lang.reflect.InvocationTargetException; import java.util.List; import java.util.stream.Collectors; -import org.apache.commons.lang3.StringUtils; import org.dspace.core.Context; +import org.dspace.kernel.ServiceManager; +import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.service.ScriptService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; /** * The implementation for the {@link ScriptService} */ public class ScriptServiceImpl implements ScriptService { + private static final Logger log = LoggerFactory.getLogger(ScriptServiceImpl.class); @Autowired - private List dSpaceRunnables; + private ServiceManager serviceManager; @Override - public DSpaceRunnable getScriptForName(String name) { - return dSpaceRunnables.stream() - .filter(dSpaceRunnable -> StringUtils.equalsIgnoreCase(dSpaceRunnable.getName(), name)) - .findFirst() - .orElse(null); + public ScriptConfiguration getScriptConfiguration(String name) { + return serviceManager.getServiceByName(name, ScriptConfiguration.class); } @Override - public List getDSpaceRunnables(Context context) { - return dSpaceRunnables.stream().filter( - dSpaceRunnable -> dSpaceRunnable.isAllowedToExecute(context)).collect(Collectors.toList()); + public List getScriptConfigurations(Context context) { + return serviceManager.getServicesByType(ScriptConfiguration.class).stream().filter( + scriptConfiguration -> scriptConfiguration.isAllowedToExecute(context)).collect(Collectors.toList()); + } + + @Override + public DSpaceRunnable createDSpaceRunnableForScriptConfiguration(ScriptConfiguration scriptToExecute) + throws IllegalAccessException, InstantiationException { + try { + return (DSpaceRunnable) scriptToExecute.getDspaceRunnableClass().getDeclaredConstructor().newInstance(); + } catch (InvocationTargetException | NoSuchMethodException e) { + log.error(e.getMessage(), e); + 
throw new RuntimeException(e); + } } } diff --git a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java new file mode 100644 index 0000000000..4b15c22f44 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java @@ -0,0 +1,92 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.scripts.configuration; + +import org.apache.commons.cli.Options; +import org.dspace.core.Context; +import org.dspace.scripts.DSpaceRunnable; +import org.springframework.beans.factory.BeanNameAware; + +/** + * This class represents an Abstract class that a ScriptConfiguration can inherit to further implement this + * and represent a script's configuration + */ +public abstract class ScriptConfiguration implements BeanNameAware { + + /** + * The possible options for this script + */ + protected Options options; + + private String description; + + private String name; + + /** + * Generic getter for the description + * @return the description value of this ScriptConfiguration + */ + public String getDescription() { + return description; + } + + /** + * Generic setter for the description + * @param description The description to be set on this ScriptConfiguration + */ + public void setDescription(String description) { + this.description = description; + } + + /** + * Generic getter for the name + * @return the name value of this ScriptConfiguration + */ + public String getName() { + return name; + } + + /** + * Generic setter for the name + * @param name The name to be set on this ScriptConfiguration + */ + public void setName(String name) { + this.name = name; + } + + /** + * Generic getter for the dspaceRunnableClass + * @return the dspaceRunnableClass value of this ScriptConfiguration + */ + public abstract Class getDspaceRunnableClass(); + + /** + * Generic setter for the dspaceRunnableClass + * @param dspaceRunnableClass The dspaceRunnableClass to be set on this IndexDiscoveryScriptConfiguration + */ + public abstract void setDspaceRunnableClass(Class dspaceRunnableClass); + /** + * This method will return if the script is allowed to execute in the given context. 
This is by default set + * to the currentUser in the context being an admin, however this can be overwritten by each script individually + * if different rules apply + * @param context The relevant DSpace context + * @return A boolean indicating whether the script is allowed to execute or not + */ + public abstract boolean isAllowedToExecute(Context context); + + /** + * The getter for the options of the Script + * @return the options value of this ScriptConfiguration + */ + public abstract Options getOptions(); + + @Override + public void setBeanName(String beanName) { + this.name = beanName; + } +} diff --git a/dspace-api/src/main/java/org/dspace/scripts/handler/DSpaceRunnableHandler.java b/dspace-api/src/main/java/org/dspace/scripts/handler/DSpaceRunnableHandler.java index 01ca2fafd9..078ba6bfa2 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/handler/DSpaceRunnableHandler.java +++ b/dspace-api/src/main/java/org/dspace/scripts/handler/DSpaceRunnableHandler.java @@ -7,9 +7,14 @@ */ package org.dspace.scripts.handler; +import java.io.IOException; +import java.io.InputStream; import java.sql.SQLException; +import java.util.Optional; import org.apache.commons.cli.Options; +import org.dspace.authorize.AuthorizeException; +import org.dspace.core.Context; /** * This is an interface meant to be implemented by any DSpaceRunnableHandler to specify specific execution methods @@ -78,4 +83,28 @@ public interface DSpaceRunnableHandler { * @param name The name of the script */ public void printHelp(Options options, String name); + + /** + * This method will grab the InputStream for the file defined by the given file name. The exact implementation will + * differ based on whether it's a REST call or CommandLine call. The REST Call will look for Bitstreams in the + * Database whereas the CommandLine call will look on the filesystem + * @param context The relevant DSpace context + * @param fileName The filename for the file that holds the InputStream + * @return The InputStream for the file defined by the given file name + * @throws IOException If something goes wrong + * @throws AuthorizeException If something goes wrong + */ + public Optional getFileStream(Context context, String fileName) throws IOException, AuthorizeException; + + /** + * This method will write the InputStream to either a file on the filesystem or a bitstream in the database + * depending on whether it's coming from a CommandLine call or REST call respectively + * @param context The relevant DSpace context + * @param fileName The filename + * @param inputStream The inputstream to be written + * @param type The type of the file + * @throws IOException If something goes wrong + */ + public void writeFilestream(Context context, String fileName, InputStream inputStream, String type) + throws IOException, SQLException, AuthorizeException; } diff --git a/dspace-api/src/main/java/org/dspace/scripts/handler/impl/CommandLineDSpaceRunnableHandler.java b/dspace-api/src/main/java/org/dspace/scripts/handler/impl/CommandLineDSpaceRunnableHandler.java index 97925c1843..6775b9a455 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/handler/impl/CommandLineDSpaceRunnableHandler.java +++ b/dspace-api/src/main/java/org/dspace/scripts/handler/impl/CommandLineDSpaceRunnableHandler.java @@ -7,9 +7,16 @@ */ package org.dspace.scripts.handler.impl; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.util.Optional; + import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Options; 
+import org.apache.commons.io.FileUtils; import org.apache.logging.log4j.Logger; +import org.dspace.core.Context; import org.dspace.scripts.handler.DSpaceRunnableHandler; /** @@ -84,4 +91,20 @@ public class CommandLineDSpaceRunnableHandler implements DSpaceRunnableHandler { formatter.printHelp(name, options); } } + + @Override + public Optional getFileStream(Context context, String fileName) throws IOException { + File file = new File(fileName); + if (!(file.exists() && file.isFile())) { + return Optional.empty(); + } + return Optional.of(FileUtils.openInputStream(file)); + } + + @Override + public void writeFilestream(Context context, String fileName, InputStream inputStream, String type) + throws IOException { + File file = new File(fileName); + FileUtils.copyInputStreamToFile(inputStream, file); + } } diff --git a/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java b/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java index e277ab32f4..27c0c75a35 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java +++ b/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java @@ -7,13 +7,19 @@ */ package org.dspace.scripts.service; +import java.io.IOException; +import java.io.InputStream; import java.sql.SQLException; import java.util.List; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Bitstream; import org.dspace.core.Context; import org.dspace.eperson.EPerson; import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.Process; +import org.dspace.scripts.ProcessLogLevel; +import org.dspace.scripts.ProcessQueryParameterContainer; /** * An interface for the ProcessService with methods regarding the Process workload @@ -104,13 +110,28 @@ public interface ProcessService { */ public void complete(Context context, Process process) throws SQLException; + /** + * The method will create a bitstream from the given inputstream with the given type as metadata and given name + * as name and attach it to the given process + * @param context The relevant DSpace context + * @param process The process for which the bitstream will be made + * @param is The inputstream for the bitstream + * @param type The type of the bitstream + * @param fileName The name of the bitstream + * @throws IOException If something goes wrong + * @throws SQLException If something goes wrong + * @throws AuthorizeException If something goes wrong + */ + public void appendFile(Context context, Process process, InputStream is, String type, String fileName) + throws IOException, SQLException, AuthorizeException; + /** * This method will delete the given Process object from the database * @param context The relevant DSpace context * @param process The Process object to be deleted * @throws SQLException If something goes wrong */ - public void delete(Context context, Process process) throws SQLException; + public void delete(Context context, Process process) throws SQLException, IOException, AuthorizeException; /** * This method will be used to update the given Process object in the database @@ -128,6 +149,32 @@ public interface ProcessService { */ public List getParameters(Process process); + /** + * This method will return the Bitstream that matches the given name for the given Process + * @param context The relevant DSpace context + * @param process The process that should hold the requested Bitstream + * @param bitstreamName The name of the requested Bitstream + * @return The Bitstream from the given Process 
that matches the given bitstream name + */ + public Bitstream getBitstreamByName(Context context, Process process, String bitstreamName); + + /** + * This method will return the Bitstream for a given process with a given type + * @param context The relevant DSpace context + * @param process The process that holds the Bitstreams to be searched in + * @param type The type that the Bitstream must have + * @return The Bitstream of the given type for the given Process + */ + public Bitstream getBitstream(Context context, Process process, String type); + + /** + * This method will return all the Bitstreams for a given process + * @param context The relevant DSpace context + * @param process The process that holds the Bitstreams to be searched in + * @return The list of Bitstreams + */ + public List getBitstreams(Context context, Process process); + /** * Returns the total amount of Process objects in the dataase * @param context The relevant DSpace context @@ -136,4 +183,56 @@ public interface ProcessService { */ int countTotal(Context context) throws SQLException; + /** + * This will return a list of Strings where each String represents the type of a Bitstream in the Process given + * @param context The DSpace context + * @param process The Process object that we'll use to find the bitstreams + * @return A list of Strings where each String represents a fileType that is in the Process + */ + public List getFileTypesForProcessBitstreams(Context context, Process process); + + /** + * Returns a list of all Processes in the database which match the given field requirements. If the + * requirements are not null, they will be combined with an AND operation. + * @param context The relevant DSpace context + * @param processQueryParameterContainer The {@link ProcessQueryParameterContainer} containing all the values + * that the returned {@link Process} objects must adhere to + * @param limit The limit for the amount of Processes returned + * @param offset The offset for the Processes to be returned + * @return The list of all Processes which match the metadata requirements + * @throws SQLException If something goes wrong + */ + List search(Context context, ProcessQueryParameterContainer processQueryParameterContainer, int limit, + int offset) throws SQLException; + + /** + * Count all the processes which match the requirements. The requirements are evaluated like the search + * method. 
+ * @param context The relevant DSpace context + * @param processQueryParameterContainer The {@link ProcessQueryParameterContainer} containing all the values + * that the returned {@link Process} objects must adhere to + * @return The number of results matching the query + * @throws SQLException If something goes wrong + */ + int countSearch(Context context, ProcessQueryParameterContainer processQueryParameterContainer) throws SQLException; + /** + * This method will append the given output to the {@link Process} its logs + * @param processId The ID of the {@link Process} to append the log for + * @param scriptName The name of the Script that Process runs + * @param output The output to append + * @param processLogLevel The loglevel of the output + * @throws IOException If something goes wrong + */ + void appendLog(int processId, String scriptName, String output, ProcessLogLevel processLogLevel) throws IOException; + + /** + * This method will create a {@link Bitstream} containing the logs for the given {@link Process} + * @param context The relevant DSpace context + * @param process The {@link Process} for which we're making the {@link Bitstream} + * @throws IOException If something goes wrong + * @throws SQLException If something goes wrong + * @throws AuthorizeException If something goes wrong + */ + void createLogBitstream(Context context, Process process) + throws IOException, SQLException, AuthorizeException; } diff --git a/dspace-api/src/main/java/org/dspace/scripts/service/ScriptService.java b/dspace-api/src/main/java/org/dspace/scripts/service/ScriptService.java index fc680bd612..3716123822 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/service/ScriptService.java +++ b/dspace-api/src/main/java/org/dspace/scripts/service/ScriptService.java @@ -11,6 +11,7 @@ import java.util.List; import org.dspace.core.Context; import org.dspace.scripts.DSpaceRunnable; +import org.dspace.scripts.configuration.ScriptConfiguration; /** * This service will deal with logic to handle DSpaceRunnable objects @@ -18,16 +19,29 @@ import org.dspace.scripts.DSpaceRunnable; public interface ScriptService { /** - * This method will return the DSpaceRunnable that has the name that's equal to the name given in the parameters + * This method will return the ScriptConfiguration that has the name that's equal to the name given in the + * parameters * @param name The name that the script has to match - * @return The matching DSpaceRunnable script + * @return The matching ScriptConfiguration */ - DSpaceRunnable getScriptForName(String name); + ScriptConfiguration getScriptConfiguration(String name); /** - * This method will return a list of DSpaceRunnable objects for which the given Context is authorized to use them + * This method will return a list of ScriptConfiguration objects for which the given Context is authorized * @param context The relevant DSpace context - * @return The list of accessible DSpaceRunnable scripts for this context + * @return The list of accessible ScriptConfiguration scripts for this context */ - List getDSpaceRunnables(Context context); + List getScriptConfigurations(Context context); + + /** + * This method will create a new instance of the DSpaceRunnable that's linked with this Scriptconfiguration + * It'll grab the DSpaceRunnable class from the ScriptConfiguration's variables and create a new instance of it + * to return + * @param scriptToExecute The relevant ScriptConfiguration + * @return The new instance of the DSpaceRunnable class + * @throws IllegalAccessException If 
something goes wrong + * @throws InstantiationException If something goes wrong + */ + DSpaceRunnable createDSpaceRunnableForScriptConfiguration(ScriptConfiguration scriptToExecute) + throws IllegalAccessException, InstantiationException; } diff --git a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java index e1ff0c69b8..cd46f8dc8a 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java @@ -252,8 +252,11 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea solr.add(doc1); - //commits are executed automatically using the solr autocommit -// solr.commit(false, false); + // commits are executed automatically using the solr autocommit + boolean useAutoCommit = configurationService.getBooleanProperty("solr-statistics.autoCommit", true); + if (!useAutoCommit) { + solr.commit(false, false); + } } catch (RuntimeException re) { throw re; @@ -289,7 +292,10 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea solr.add(doc1); // commits are executed automatically using the solr autocommit - // solr.commit(false, false); + boolean useAutoCommit = configurationService.getBooleanProperty("solr-statistics.autoCommit", true); + if (!useAutoCommit) { + solr.commit(false, false); + } } catch (RuntimeException re) { throw re; @@ -842,18 +848,18 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea } @Override - public void query(String query, int max) + public void query(String query, int max, int facetMinCount) throws SolrServerException, IOException { - query(query, null, null, 0, max, null, null, null, null, null, false); + query(query, null, null, 0, max, null, null, null, null, null, false, facetMinCount); } @Override public ObjectCount[] queryFacetField(String query, String filterQuery, String facetField, int max, boolean showTotal, - List facetQueries) + List facetQueries, int facetMinCount) throws SolrServerException, IOException { QueryResponse queryResponse = query(query, filterQuery, facetField, - 0, max, null, null, null, facetQueries, null, false); + 0, max, null, null, null, facetQueries, null, false, facetMinCount); if (queryResponse == null) { return new ObjectCount[0]; } @@ -887,50 +893,55 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea @Override public ObjectCount[] queryFacetDate(String query, String filterQuery, int max, String dateType, String dateStart, - String dateEnd, boolean showTotal, Context context) + String dateEnd, boolean showTotal, Context context, int facetMinCount) throws SolrServerException, IOException { QueryResponse queryResponse = query(query, filterQuery, null, 0, max, - dateType, dateStart, dateEnd, null, null, false); + dateType, dateStart, dateEnd, null, null, false, facetMinCount); if (queryResponse == null) { return new ObjectCount[0]; } - FacetField dateFacet = queryResponse.getFacetDate("time"); - // TODO: check if this cannot crash I checked it, it crashed!!! - // Create an array for our result - ObjectCount[] result = new ObjectCount[dateFacet.getValueCount() - + (showTotal ? 
1 : 0)]; - // Run over our datefacet & store all the values - for (int i = 0; i < dateFacet.getValues().size(); i++) { - FacetField.Count dateCount = dateFacet.getValues().get(i); - result[i] = new ObjectCount(); - result[i].setCount(dateCount.getCount()); - result[i].setValue(getDateView(dateCount.getName(), dateType, context)); + List rangeFacets = queryResponse.getFacetRanges(); + for (RangeFacet rangeFacet: rangeFacets) { + if (rangeFacet.getName().equalsIgnoreCase("time")) { + RangeFacet timeFacet = rangeFacet; + // Create an array for our result + ObjectCount[] result = new ObjectCount[timeFacet.getCounts().size() + + (showTotal ? 1 : 0)]; + // Run over our datefacet & store all the values + for (int i = 0; i < timeFacet.getCounts().size(); i++) { + RangeFacet.Count dateCount = (RangeFacet.Count) timeFacet.getCounts().get(i); + result[i] = new ObjectCount(); + result[i].setCount(dateCount.getCount()); + result[i].setValue(getDateView(dateCount.getValue(), dateType, context)); + } + if (showTotal) { + result[result.length - 1] = new ObjectCount(); + result[result.length - 1].setCount(queryResponse.getResults() + .getNumFound()); + // TODO: Make sure that this total is gotten out of the msgs.xml + result[result.length - 1].setValue("total"); + } + return result; + } } - if (showTotal) { - result[result.length - 1] = new ObjectCount(); - result[result.length - 1].setCount(queryResponse.getResults() - .getNumFound()); - // TODO: Make sure that this total is gotten out of the msgs.xml - result[result.length - 1].setValue("total"); - } - return result; + return new ObjectCount[0]; } @Override - public Map queryFacetQuery(String query, - String filterQuery, List facetQueries) + public Map queryFacetQuery(String query, String filterQuery, List facetQueries, + int facetMinCount) throws SolrServerException, IOException { QueryResponse response = query(query, filterQuery, null, 0, 1, null, null, - null, facetQueries, null, false); + null, facetQueries, null, false, facetMinCount); return response.getFacetQuery(); } @Override - public ObjectCount queryTotal(String query, String filterQuery) + public ObjectCount queryTotal(String query, String filterQuery, int facetMinCount) throws SolrServerException, IOException { QueryResponse queryResponse = query(query, filterQuery, null, 0, -1, null, - null, null, null, null, false); + null, null, null, null, false, facetMinCount); ObjectCount objCount = new ObjectCount(); objCount.setCount(queryResponse.getResults().getNumFound()); @@ -985,7 +996,8 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea @Override public QueryResponse query(String query, String filterQuery, String facetField, int rows, int max, String dateType, String dateStart, - String dateEnd, List facetQueries, String sort, boolean ascending) + String dateEnd, List facetQueries, String sort, boolean ascending, + int facetMinCount) throws SolrServerException, IOException { if (solr == null) { return null; @@ -993,20 +1005,20 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea // System.out.println("QUERY"); SolrQuery solrQuery = new SolrQuery().setRows(rows).setQuery(query) - .setFacetMinCount(1); + .setFacetMinCount(facetMinCount); addAdditionalSolrYearCores(solrQuery); // Set the date facet if present if (dateType != null) { - solrQuery.setParam("facet.date", "time") + solrQuery.setParam("facet.range", "time") . 
// EXAMPLE: NOW/MONTH+1MONTH - setParam("facet.date.end", + setParam("f.time.facet.range.end", "NOW/" + dateType + dateEnd + dateType).setParam( - "facet.date.gap", "+1" + dateType) + "f.time.facet.range.gap", "+1" + dateType) . // EXAMPLE: NOW/MONTH-" + nbMonths + "MONTHS - setParam("facet.date.start", + setParam("f.time.facet.range.start", "NOW/" + dateType + dateStart + dateType + "S") .setFacet(true); } @@ -1555,7 +1567,8 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea * initialization at the same time. */ protected synchronized void initSolrYearCores() { - if (statisticYearCoresInit || !(solr instanceof HttpSolrClient)) { + if (statisticYearCoresInit || !(solr instanceof HttpSolrClient) || !configurationService.getBooleanProperty( + "usage-statistics.shardedByYear", false)) { return; } diff --git a/dspace-api/src/main/java/org/dspace/statistics/content/DatasetTimeGenerator.java b/dspace-api/src/main/java/org/dspace/statistics/content/DatasetTimeGenerator.java index 12c8bab6d3..1152ee669c 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/content/DatasetTimeGenerator.java +++ b/dspace-api/src/main/java/org/dspace/statistics/content/DatasetTimeGenerator.java @@ -31,7 +31,7 @@ public class DatasetTimeGenerator extends DatasetGenerator { /** * Default constructor */ - private DatasetTimeGenerator() { } + public DatasetTimeGenerator() { } /** * Sets the date interval. diff --git a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsBSAdapter.java b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsBSAdapter.java index ec6aecde98..7fc2167e05 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsBSAdapter.java +++ b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsBSAdapter.java @@ -68,11 +68,11 @@ public class StatisticsBSAdapter { switch (visitType) { case ITEM_VISITS: return solrLoggerService - .queryTotal("type: " + Constants.ITEM + " AND id: " + item.getID(), resolveFilterQueries()) + .queryTotal("type: " + Constants.ITEM + " AND id: " + item.getID(), resolveFilterQueries(), 0) .getCount(); case BITSTREAM_VISITS: return solrLoggerService.queryTotal("type: " + Constants.BITSTREAM + " AND owningItem: " + item.getID(), - resolveFilterQueries()).getCount(); + resolveFilterQueries(), 0).getCount(); case TOTAL_VISITS: return getNumberOfVisits(ITEM_VISITS, item) + getNumberOfVisits(BITSTREAM_VISITS, item); default: diff --git a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsData.java b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsData.java index 1b09859362..9e307ecb40 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsData.java +++ b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsData.java @@ -115,13 +115,14 @@ public abstract class StatisticsData { * Run the accumulated query and return its results. * * @param context The relevant DSpace Context. + * @param facetMinCount Minimum count of results facet must have to return a result * @return accumulated query results * @throws SQLException An exception that provides information on a database access error or other errors. * @throws SolrServerException Exception from the Solr server to the solrj Java client. * @throws IOException A general class of exceptions produced by failed or interrupted I/O operations. 
* @throws ParseException if the dataset cannot be parsed */ - public abstract Dataset createDataset(Context context) throws SQLException, + public abstract Dataset createDataset(Context context, int facetMinCount) throws SQLException, SolrServerException, IOException, ParseException; } diff --git a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataSearches.java b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataSearches.java index 662108c1d7..b8c2a63c84 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataSearches.java +++ b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataSearches.java @@ -50,7 +50,7 @@ public class StatisticsDataSearches extends StatisticsData { @Override - public Dataset createDataset(Context context) + public Dataset createDataset(Context context, int facetMinCount) throws SQLException, SolrServerException, IOException, ParseException { // Check if we already have one. // If we do then give it back. @@ -85,7 +85,7 @@ public class StatisticsDataSearches extends StatisticsData { ObjectCount[] topCounts = solrLoggerService .queryFacetField(query, fqBuffer.toString(), typeGenerator.getType(), typeGenerator.getMax(), - (typeGenerator.isPercentage() || typeGenerator.isIncludeTotal()), null); + (typeGenerator.isPercentage() || typeGenerator.isIncludeTotal()), null, 0); long totalCount = -1; if (typeGenerator.isPercentage() && 0 < topCounts.length) { //Retrieve the total required to calculate the percentage @@ -133,14 +133,15 @@ public class StatisticsDataSearches extends StatisticsData { queryString = "\"\""; } - ObjectCount totalPageViews = getTotalPageViews("query:" + queryString, defaultFilterQuery); + ObjectCount totalPageViews = getTotalPageViews("query:" + queryString, defaultFilterQuery + , facetMinCount); dataset.addValueToMatrix(i, 3, pageViewFormat .format((float) totalPageViews.getCount() / queryCount.getCount())); } } } else if (typeGenerator.getMode() == DatasetSearchGenerator.Mode.SEARCH_OVERVIEW_TOTAL) { //Retrieve the total counts ! 
- ObjectCount totalCount = solrLoggerService.queryTotal(query, getSearchFilterQuery()); + ObjectCount totalCount = solrLoggerService.queryTotal(query, getSearchFilterQuery(), facetMinCount); //Retrieve the filtered count by using the default filter query StringBuilder fqBuffer = new StringBuilder(defaultFilterQuery); @@ -149,7 +150,7 @@ public class StatisticsDataSearches extends StatisticsData { } fqBuffer.append(getSearchFilterQuery()); - ObjectCount totalFiltered = solrLoggerService.queryTotal(query, fqBuffer.toString()); + ObjectCount totalFiltered = solrLoggerService.queryTotal(query, fqBuffer.toString(), facetMinCount); fqBuffer = new StringBuilder(defaultFilterQuery); @@ -159,7 +160,7 @@ public class StatisticsDataSearches extends StatisticsData { fqBuffer.append("statistics_type:") .append(SolrLoggerServiceImpl.StatisticsType.SEARCH_RESULT.text()); - ObjectCount totalPageViews = getTotalPageViews(query, defaultFilterQuery); + ObjectCount totalPageViews = getTotalPageViews(query, defaultFilterQuery, facetMinCount); dataset = new Dataset(1, 3); dataset.setRowLabel(0, ""); @@ -221,7 +222,7 @@ public class StatisticsDataSearches extends StatisticsData { return query; } - protected ObjectCount getTotalPageViews(String query, String defaultFilterQuery) + protected ObjectCount getTotalPageViews(String query, String defaultFilterQuery, int facetMinCount) throws SolrServerException, IOException { StringBuilder fqBuffer; fqBuffer = new StringBuilder(defaultFilterQuery); @@ -232,7 +233,7 @@ public class StatisticsDataSearches extends StatisticsData { //Retrieve the number of page views by this query ! - return solrLoggerService.queryTotal(query, fqBuffer.toString()); + return solrLoggerService.queryTotal(query, fqBuffer.toString(), facetMinCount); } /** diff --git a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataVisits.java b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataVisits.java index 7ad9e9cf88..9010edacf3 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataVisits.java +++ b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataVisits.java @@ -58,7 +58,7 @@ import org.dspace.statistics.util.LocationUtils; *

 *  <li>Add a {@link DatasetDSpaceObjectGenerator} for the appropriate object type.</li>
 *  <li>Add other generators as required to get the statistic you want.</li>
 *  <li>Add {@link org.dspace.statistics.content.filter filters} as required.</li>
- *  <li>{@link #createDataset(Context)} will run the query and return a result matrix.
+ *  <li>{@link #createDataset(Context, int)} will run the query and return a result matrix.
 *      Subsequent calls skip the query and return the same matrix.</li>
  • * * @@ -117,7 +117,7 @@ public class StatisticsDataVisits extends StatisticsData { } @Override - public Dataset createDataset(Context context) throws SQLException, + public Dataset createDataset(Context context, int facetMinCount) throws SQLException, SolrServerException, ParseException, IOException { // Check if we already have one. // If we do then give it back. @@ -214,7 +214,8 @@ public class StatisticsDataVisits extends StatisticsData { // We are asking from our current query all the visits faceted by date ObjectCount[] results = solrLoggerService .queryFacetDate(query, filterQuery, dataSetQuery.getMax(), dateFacet.getDateType(), - dateFacet.getStartDate(), dateFacet.getEndDate(), showTotal, context); + dateFacet.getStartDate(), dateFacet.getEndDate(), showTotal, context, + facetMinCount); dataset = new Dataset(1, results.length); // Now that we have our results put em in a matrix for (int j = 0; j < results.length; j++) { @@ -230,15 +231,15 @@ public class StatisticsDataVisits extends StatisticsData { // the datasettimequery ObjectCount[] maxObjectCounts = solrLoggerService .queryFacetField(query, filterQuery, dataSetQuery.getFacetField(), dataSetQuery.getMax(), - false, null); + false, null, facetMinCount); for (int j = 0; j < maxObjectCounts.length; j++) { ObjectCount firstCount = maxObjectCounts[j]; String newQuery = dataSetQuery.getFacetField() + ": " + ClientUtils .escapeQueryChars(firstCount.getValue()) + " AND " + query; ObjectCount[] maxDateFacetCounts = solrLoggerService .queryFacetDate(newQuery, filterQuery, dataSetQuery.getMax(), dateFacet.getDateType(), - dateFacet.getStartDate(), dateFacet.getEndDate(), showTotal, context); - + dateFacet.getStartDate(), dateFacet.getEndDate(), showTotal, context, + facetMinCount); // Make sure we have a dataSet if (dataset == null) { @@ -283,7 +284,8 @@ public class StatisticsDataVisits extends StatisticsData { ObjectCount[] topCounts1 = null; // if (firsDataset.getQueries().size() == 1) { - topCounts1 = queryFacetField(firsDataset, firsDataset.getQueries().get(0).getQuery(), filterQuery); + topCounts1 = + queryFacetField(firsDataset, firsDataset.getQueries().get(0).getQuery(), filterQuery, facetMinCount); // } else { // TODO: do this // } @@ -292,7 +294,7 @@ public class StatisticsDataVisits extends StatisticsData { DatasetQuery secondDataSet = datasetQueries.get(1); // Now do the second one ObjectCount[] topCounts2 = queryFacetField(secondDataSet, secondDataSet.getQueries().get(0).getQuery(), - filterQuery); + filterQuery, facetMinCount); // Now that have results for both of them lets do x.y queries List facetQueries = new ArrayList(); for (ObjectCount count2 : topCounts2) { @@ -325,7 +327,7 @@ public class StatisticsDataVisits extends StatisticsData { } Map facetResult = solrLoggerService - .queryFacetQuery(query, filterQuery, facetQueries); + .queryFacetQuery(query, filterQuery, facetQueries, facetMinCount); // TODO: the show total @@ -671,7 +673,7 @@ public class StatisticsDataVisits extends StatisticsData { case Constants.ITEM: Item item = itemService.findByIdOrLegacyId(context, dsoId); - if (item == null) { + if (item == null || item.getHandle() == null) { break; } @@ -680,7 +682,7 @@ public class StatisticsDataVisits extends StatisticsData { case Constants.COLLECTION: Collection coll = collectionService.findByIdOrLegacyId(context, dsoId); - if (coll == null) { + if (coll == null || coll.getHandle() == null) { break; } @@ -689,7 +691,7 @@ public class StatisticsDataVisits extends StatisticsData { case Constants.COMMUNITY: 
Community comm = communityService.findByIdOrLegacyId(context, dsoId); - if (comm == null) { + if (comm == null || comm.getHandle() == null) { break; } @@ -704,12 +706,12 @@ public class StatisticsDataVisits extends StatisticsData { protected ObjectCount[] queryFacetField(DatasetQuery dataset, String query, - String filterQuery) + String filterQuery, int facetMinCount) throws SolrServerException, IOException { String facetType = dataset.getFacetField() == null ? "id" : dataset .getFacetField(); return solrLoggerService.queryFacetField(query, filterQuery, facetType, - dataset.getMax(), false, null); + dataset.getMax(), false, null, facetMinCount); } public static class DatasetQuery { diff --git a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataWorkflow.java b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataWorkflow.java index 7d3a7ff37a..409b79cb69 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataWorkflow.java +++ b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataWorkflow.java @@ -65,7 +65,7 @@ public class StatisticsDataWorkflow extends StatisticsData { @Override - public Dataset createDataset(Context context) + public Dataset createDataset(Context context, int facetMinCount) throws SQLException, SolrServerException, IOException, ParseException { // Check if we already have one. // If we do then give it back. @@ -92,16 +92,16 @@ public class StatisticsDataWorkflow extends StatisticsData { DatasetTypeGenerator typeGenerator = (DatasetTypeGenerator) datasetGenerator; ObjectCount[] topCounts = solrLoggerService .queryFacetField(query, defaultFilterQuery, typeGenerator.getType(), typeGenerator.getMax(), - typeGenerator.isIncludeTotal(), null); + typeGenerator.isIncludeTotal(), null, facetMinCount); //Retrieve our total field counts Map totalFieldCounts = new HashMap(); if (averageMonths != -1) { - totalFieldCounts = getTotalFacetCounts(typeGenerator); + totalFieldCounts = getTotalFacetCounts(typeGenerator, facetMinCount); } long monthDifference = 1; - if (getOldestWorkflowItemDate() != null) { - monthDifference = getMonthsDifference(new Date(), getOldestWorkflowItemDate()); + if (getOldestWorkflowItemDate(facetMinCount) != null) { + monthDifference = getMonthsDifference(new Date(), getOldestWorkflowItemDate(facetMinCount)); } dataset = new Dataset(topCounts.length, (averageMonths != -1 ? 3 : 2)); @@ -168,10 +168,10 @@ public class StatisticsDataWorkflow extends StatisticsData { * @throws org.apache.solr.client.solrj.SolrServerException passed through. * @throws java.io.IOException passed through. 
*/ - protected Map getTotalFacetCounts(DatasetTypeGenerator typeGenerator) + protected Map getTotalFacetCounts(DatasetTypeGenerator typeGenerator, int facetMinCount) throws SolrServerException, IOException { ObjectCount[] objectCounts = solrLoggerService - .queryFacetField(getQuery(), null, typeGenerator.getType(), -1, false, null); + .queryFacetField(getQuery(), null, typeGenerator.getType(), -1, false, null, facetMinCount); Map result = new HashMap<>(); for (ObjectCount objectCount : objectCounts) { result.put(objectCount.getValue(), objectCount.getCount()); @@ -179,14 +179,14 @@ public class StatisticsDataWorkflow extends StatisticsData { return result; } - protected Date getOldestWorkflowItemDate() + protected Date getOldestWorkflowItemDate(int facetMinCount) throws SolrServerException, IOException { ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); String workflowStartDate = configurationService.getProperty("usage-statistics.workflow-start-date"); if (workflowStartDate == null) { //Query our solr for it ! QueryResponse oldestRecord = solrLoggerService - .query(getQuery(), null, null, 1, 0, null, null, null, null, "time", true); + .query(getQuery(), null, null, 1, 0, null, null, null, null, "time", true, facetMinCount); if (0 < oldestRecord.getResults().getNumFound()) { SolrDocument solrDocument = oldestRecord.getResults().get(0); Date oldestDate = (Date) solrDocument.getFieldValue("time"); diff --git a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDisplay.java b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDisplay.java index 9bd54c189f..a1058c907f 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDisplay.java +++ b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDisplay.java @@ -83,8 +83,9 @@ public abstract class StatisticsDisplay { return statisticsData.getDataset(); } - public Dataset getDataset(Context context) throws SQLException, SolrServerException, IOException, ParseException { - return statisticsData.createDataset(context); + public Dataset getDataset(Context context, int facetMinCount) throws SQLException, SolrServerException, IOException, + ParseException { + return statisticsData.createDataset(context, facetMinCount); } public void addCss(String style) { diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/FailedOpenURLTrackerServiceImpl.java b/dspace-api/src/main/java/org/dspace/statistics/export/FailedOpenURLTrackerServiceImpl.java new file mode 100644 index 0000000000..cb8e64cc65 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/statistics/export/FailedOpenURLTrackerServiceImpl.java @@ -0,0 +1,59 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export; + +import java.sql.SQLException; +import java.util.List; + +import org.dspace.core.Context; +import org.dspace.statistics.export.dao.OpenURLTrackerDAO; +import org.dspace.statistics.export.service.FailedOpenURLTrackerService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of the service that handles the OpenURLTracker database operations + */ +public class FailedOpenURLTrackerServiceImpl implements FailedOpenURLTrackerService { + + @Autowired(required = true) + protected OpenURLTrackerDAO openURLTrackerDAO; + + /** + 
* Removes an OpenURLTracker from the database + * @param context + * @param openURLTracker + * @throws SQLException + */ + @Override + public void remove(Context context, OpenURLTracker openURLTracker) throws SQLException { + openURLTrackerDAO.delete(context, openURLTracker); + } + + /** + * Returns all OpenURLTrackers from the database + * @param context + * @return all OpenURLTrackers + * @throws SQLException + */ + @Override + public List findAll(Context context) throws SQLException { + return openURLTrackerDAO.findAll(context, OpenURLTracker.class); + } + + /** + * Creates a new OpenURLTracker + * @param context + * @return the creatred OpenURLTracker + * @throws SQLException + */ + @Override + public OpenURLTracker create(Context context) throws SQLException { + OpenURLTracker openURLTracker = openURLTrackerDAO.create(context, new OpenURLTracker()); + return openURLTracker; + } +} diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/IrusExportUsageEventListener.java b/dspace-api/src/main/java/org/dspace/statistics/export/IrusExportUsageEventListener.java new file mode 100644 index 0000000000..a8101c51de --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/statistics/export/IrusExportUsageEventListener.java @@ -0,0 +1,74 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export; + +import java.util.UUID; + +import org.apache.log4j.Logger; +import org.dspace.content.Bitstream; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.core.LogManager; +import org.dspace.services.ConfigurationService; +import org.dspace.services.model.Event; +import org.dspace.statistics.export.processor.BitstreamEventProcessor; +import org.dspace.statistics.export.processor.ItemEventProcessor; +import org.dspace.usage.AbstractUsageEventListener; +import org.dspace.usage.UsageEvent; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Class to receive usage events and send corresponding data to IRUS + */ +public class IrusExportUsageEventListener extends AbstractUsageEventListener { + /* Log4j logger*/ + private static Logger log = Logger.getLogger(IrusExportUsageEventListener.class); + + @Autowired + ConfigurationService configurationService; + + /** + * Receives an event and processes to create a URL to send to IRUS when certain conditions are met + * + * @param event includes all the information related to the event that occurred + */ + public void receiveEvent(Event event) { + if (configurationService.getBooleanProperty("irus.statistics.tracker.enabled", false)) { + if (event instanceof UsageEvent) { + UsageEvent ue = (UsageEvent) event; + Context context = ue.getContext(); + + try { + //Check for item investigation + if (ue.getObject() instanceof Item) { + ItemEventProcessor itemEventProcessor = new ItemEventProcessor(context, ue.getRequest(), + (Item) ue.getObject()); + itemEventProcessor.processEvent(); + } else if (ue.getObject() instanceof Bitstream) { + + BitstreamEventProcessor bitstreamEventProcessor = + new BitstreamEventProcessor(context, ue.getRequest(), (Bitstream) ue.getObject()); + bitstreamEventProcessor.processEvent(); + } + } catch (Exception e) { + UUID id; + id = ue.getObject().getID(); + + int type; + try { + type = ue.getObject().getType(); + } catch (Exception e1) { + type = -1; + } + 
log.error(LogManager.getHeader(ue.getContext(), "Error while processing export of use event", + "Id: " + id + " type: " + type), e); + } + } + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/OpenURLTracker.java b/dspace-api/src/main/java/org/dspace/statistics/export/OpenURLTracker.java new file mode 100644 index 0000000000..b853f255e8 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/statistics/export/OpenURLTracker.java @@ -0,0 +1,121 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export; + +import java.util.Date; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.SequenceGenerator; +import javax.persistence.Table; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; + +import org.dspace.core.ReloadableEntity; +import org.hibernate.proxy.HibernateProxyHelper; + +/** + * Class that represents an OpenURLTracker which tracks a failed transmission to IRUS + */ +@Entity +@Table(name = "OpenUrlTracker") +public class OpenURLTracker implements ReloadableEntity { + + @Id + @Column(name = "tracker_id") + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "openurltracker_seq") + @SequenceGenerator(name = "openurltracker_seq", sequenceName = "openurltracker_seq", allocationSize = 1) + private Integer id; + + @Column(name = "tracker_url", length = 1000) + private String url; + + @Column(name = "uploaddate") + @Temporal(TemporalType.DATE) + private Date uploadDate; + + protected OpenURLTracker() { + } + + /** + * Gets the OpenURLTracker id + * @return the id + */ + @Override + public Integer getID() { + return id; + } + + /** + * Gets the OpenURLTracker url + * @return the url + */ + public String getUrl() { + return url; + } + + /** + * Sets the OpenURLTracker url + * @param url + */ + public void setUrl(String url) { + this.url = url; + } + + /** + * Returns the upload date + * @return upload date + */ + public Date getUploadDate() { + return uploadDate; + } + + /** + * Set the upload date + * @param uploadDate + */ + public void setUploadDate(Date uploadDate) { + this.uploadDate = uploadDate; + } + + /** + * Determines whether two objects of this class are equal by comparing the ID + * @param o - object to compare + * @return whether the objects are equal + */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + Class objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(o); + if (getClass() != objClass) { + return false; + } + + final OpenURLTracker that = (OpenURLTracker) o; + if (!this.getID().equals(that.getID())) { + return false; + } + + return true; + } + + /** + * Returns the hash code value for the object + * @return hash code + */ + @Override + public int hashCode() { + int hash = 8; + hash = 74 * hash + this.getID(); + return hash; + } +} diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTracker.java b/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTracker.java new file mode 100644 index 0000000000..6b1bea0de1 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTracker.java @@ -0,0 +1,84 @@ +/** + * The contents of 
this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export; + +import org.apache.commons.cli.ParseException; +import org.apache.commons.lang3.StringUtils; +import org.dspace.core.Context; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.statistics.export.factory.OpenURLTrackerLoggerServiceFactory; +import org.dspace.statistics.export.service.OpenUrlService; +import org.dspace.utils.DSpace; + +/** + * Script to retry the failed url transmissions to IRUS + * This script also has an option to add new failed urls for testing purposes + */ +public class RetryFailedOpenUrlTracker extends DSpaceRunnable<RetryFailedOpenUrlTrackerScriptConfiguration> { + + private String lineToAdd = null; + private boolean help = false; + private boolean retryFailed = false; + + private OpenUrlService openUrlService; + + /** + * Run the script + * When the -a option is used, a new "failed" url will be added to the database + * + * @throws Exception + */ + public void internalRun() throws Exception { + if (help) { + printHelp(); + return; + } + Context context = new Context(); + context.turnOffAuthorisationSystem(); + + if (StringUtils.isNotBlank(lineToAdd)) { + openUrlService.logfailed(context, lineToAdd); + handler.logInfo("Created dummy entry in OpenUrlTracker with URL: " + lineToAdd); + } + if (retryFailed) { + handler.logInfo("Reprocessing failed URLs stored in the db"); + openUrlService.reprocessFailedQueue(context); + } + context.restoreAuthSystemState(); + context.complete(); + } + + public RetryFailedOpenUrlTrackerScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager().getServiceByName("retry-tracker", + RetryFailedOpenUrlTrackerScriptConfiguration.class); + } + + /** + * Sets up the parameters + * + * @throws ParseException + */ + public void setup() throws ParseException { + openUrlService = OpenURLTrackerLoggerServiceFactory.getInstance().getOpenUrlService(); + + if (!(commandLine.hasOption('a') || commandLine.hasOption('r') || commandLine.hasOption('h'))) { + throw new ParseException("At least one of the parameters (-a, -r, -h) is required!"); + } + + if (commandLine.hasOption('h')) { + help = true; + } + if (commandLine.hasOption('a')) { + lineToAdd = commandLine.getOptionValue('a'); + } + if (commandLine.hasOption('r')) { + retryFailed = true; + } + } + +} diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java new file mode 100644 index 0000000000..b5d65aa4e5 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java @@ -0,0 +1,74 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export; + +import java.sql.SQLException; + +import org.apache.commons.cli.Options; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.core.Context; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * The {@link ScriptConfiguration} for the {@link RetryFailedOpenUrlTracker} script + */ +public class
RetryFailedOpenUrlTrackerScriptConfiguration + extends ScriptConfiguration { + + @Autowired + private AuthorizeService authorizeService; + + private Class dspaceRunnableClass; + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + /** + * Generic setter for the dspaceRunnableClass + * + * @param dspaceRunnableClass The dspaceRunnableClass to be set on this RetryFailedOpenUrlTrackerScriptConfiguration + */ + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public boolean isAllowedToExecute(Context context) { + try { + return authorizeService.isAdmin(context); + } catch (SQLException e) { + throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + } + } + + @Override + public Options getOptions() { + if (options == null) { + Options options = new Options(); + + options.addOption("a", true, "Add a new \"failed\" row to the table with a url (test purposes only)"); + options.getOption("a").setType(String.class); + + options.addOption("r", false, + "Retry sending requests to all urls stored in the table with failed requests. " + + "This includes the url that can be added through the -a option."); + options.getOption("r").setType(boolean.class); + + options.addOption("h", "help", false, "print this help message"); + options.getOption("h").setType(boolean.class); + + super.options = options; + } + return options; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/dao/OpenURLTrackerDAO.java b/dspace-api/src/main/java/org/dspace/statistics/export/dao/OpenURLTrackerDAO.java new file mode 100644 index 0000000000..e3b957db1d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/statistics/export/dao/OpenURLTrackerDAO.java @@ -0,0 +1,21 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export.dao; + +import org.dspace.core.GenericDAO; +import org.dspace.statistics.export.OpenURLTracker; + +/** + * Database Access Object interface class for the OpenURLTracker object. + * The implementation of this class is responsible for all database calls for the OpenURLTracker object and is + * autowired by spring + * This class should only be accessed from a single service and should never be exposed outside of the API + */ +public interface OpenURLTrackerDAO extends GenericDAO { + +} diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/dao/impl/OpenURLTrackerDAOImpl.java b/dspace-api/src/main/java/org/dspace/statistics/export/dao/impl/OpenURLTrackerDAOImpl.java new file mode 100644 index 0000000000..d057f45bac --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/statistics/export/dao/impl/OpenURLTrackerDAOImpl.java @@ -0,0 +1,26 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export.dao.impl; + +import org.dspace.core.AbstractHibernateDAO; +import org.dspace.statistics.export.OpenURLTracker; +import org.dspace.statistics.export.dao.OpenURLTrackerDAO; + +/** + * Hibernate implementation of the Database Access Object interface class for the OpenURLTracker object. 
+ * This class is responsible for all database calls for the OpenURLTracker object and is autowired by spring + * This class should never be accessed directly. + * + */ +public class OpenURLTrackerDAOImpl extends AbstractHibernateDAO implements OpenURLTrackerDAO { + + protected OpenURLTrackerDAOImpl() { + super(); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/factory/OpenURLTrackerLoggerServiceFactory.java b/dspace-api/src/main/java/org/dspace/statistics/export/factory/OpenURLTrackerLoggerServiceFactory.java new file mode 100644 index 0000000000..b31b076f68 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/statistics/export/factory/OpenURLTrackerLoggerServiceFactory.java @@ -0,0 +1,41 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export.factory; + +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.statistics.export.service.FailedOpenURLTrackerService; +import org.dspace.statistics.export.service.OpenUrlService; + +/** + * The service factory for the OpenUrlTracker related services + */ +public abstract class OpenURLTrackerLoggerServiceFactory { + + /** + * Returns the FailedOpenURLTrackerService + * @return FailedOpenURLTrackerService instance + */ + public abstract FailedOpenURLTrackerService getOpenUrlTrackerLoggerService(); + + /** + * Retrieve the OpenURLTrackerLoggerServiceFactory + * @return OpenURLTrackerLoggerServiceFactory instance + */ + public static OpenURLTrackerLoggerServiceFactory getInstance() { + return DSpaceServicesFactory.getInstance().getServiceManager() + .getServiceByName("openURLTrackerLoggerServiceFactory", + OpenURLTrackerLoggerServiceFactory.class); + + } + + /** + * Returns the OpenUrlService + * @return OpenUrlService instance + */ + public abstract OpenUrlService getOpenUrlService(); +} diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/factory/OpenURLTrackerLoggerServiceFactoryImpl.java b/dspace-api/src/main/java/org/dspace/statistics/export/factory/OpenURLTrackerLoggerServiceFactoryImpl.java new file mode 100644 index 0000000000..f585fdf376 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/statistics/export/factory/OpenURLTrackerLoggerServiceFactoryImpl.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export.factory; + +import org.dspace.statistics.export.service.FailedOpenURLTrackerService; +import org.dspace.statistics.export.service.OpenUrlService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * The service factory implementation for the OpenUrlTracker related services + */ +public class OpenURLTrackerLoggerServiceFactoryImpl extends OpenURLTrackerLoggerServiceFactory { + + @Autowired(required = true) + private FailedOpenURLTrackerService failedOpenURLTrackerService; + + @Autowired(required = true) + private OpenUrlService openUrlService; + + /** + * Returns the FailedOpenURLTrackerService + * @return FailedOpenURLTrackerService instance + */ + @Override + public FailedOpenURLTrackerService getOpenUrlTrackerLoggerService() { + return failedOpenURLTrackerService; + } + + /** + * Returns the OpenUrlService + * 
@return OpenUrlService instance + */ + @Override + public OpenUrlService getOpenUrlService() { + return openUrlService; + } +} diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/processor/BitstreamEventProcessor.java b/dspace-api/src/main/java/org/dspace/statistics/export/processor/BitstreamEventProcessor.java new file mode 100644 index 0000000000..85cb7bc14c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/statistics/export/processor/BitstreamEventProcessor.java @@ -0,0 +1,129 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export.processor; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; +import java.sql.SQLException; +import javax.servlet.http.HttpServletRequest; + +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.statistics.util.SpiderDetector; + +/** + * Processor that handles Bitstream events from the IrusExportUsageEventListener + */ +public class BitstreamEventProcessor extends ExportEventProcessor { + + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + + + private Item item; + private Bitstream bitstream; + + /** + * Creates a new BitstreamEventProcessor that will set the params and obtain the parent item of the bitstream + * + * @param context + * @param request + * @param bitstream + * @throws SQLException + */ + public BitstreamEventProcessor(Context context, HttpServletRequest request, Bitstream bitstream) + throws SQLException { + super(context, request); + this.bitstream = bitstream; + this.item = getItem(request); + } + + /** + * Returns the parent item of the bitstream + * + * @return parent item of the bitstream + * @throws SQLException + */ + private Item getItem(HttpServletRequest request) throws SQLException { + if (0 < bitstream.getBundles().size()) { + if (!SpiderDetector.isSpider(request)) { + Bundle bundle = bitstream.getBundles().get(0); + if (bundle.getName() == null || !bundle.getName().equals("ORIGINAL")) { + return null; + } + + if (0 < bundle.getItems().size()) { + Item item = bundle.getItems().get(0); + return item; + } + } + } + return null; + } + + /** + * Process the event + * Check if the item should be processed + * Create the url to be transmitted based on item and bitstream data + * + * @throws SQLException + * @throws IOException + */ + public void processEvent() throws SQLException, IOException { + if (shouldProcessItem(item)) { + String baseParam = getBaseParameters(item); + String fullParam = addObjectSpecificData(baseParam, bitstream); + processObject(fullParam); + } + } + + /** + * Adds additional item and bitstream data to the url + * + * @param string to which the additional data needs to be added + * @param bitstream + * @return the string with additional data + * @throws UnsupportedEncodingException + */ + protected String addObjectSpecificData(final String string, Bitstream bitstream) + throws UnsupportedEncodingException { + StringBuilder data = new StringBuilder(string); + + String bitstreamInfo = getBitstreamInfo(bitstream); +
data.append("&").append(URLEncoder.encode("svc_dat", UTF_8)).append("=") + .append(URLEncoder.encode(bitstreamInfo, UTF_8)); + data.append("&").append(URLEncoder.encode("rft_dat", UTF_8)).append("=") + .append(URLEncoder.encode(BITSTREAM_DOWNLOAD, UTF_8)); + + return data.toString(); + } + + /** + * Get Bitstream info used for the url + * + * @param bitstream + * @return bitstream info + */ + private String getBitstreamInfo(final Bitstream bitstream) { + + String dspaceRestUrl = configurationService.getProperty("dspace.server.url"); + + StringBuilder sb = new StringBuilder(); + + sb.append(dspaceRestUrl); + sb.append("/api/core/bitstreams/"); + sb.append(bitstream.getID()); + sb.append("/content"); + + return sb.toString(); + } +} diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/processor/ExportEventProcessor.java b/dspace-api/src/main/java/org/dspace/statistics/export/processor/ExportEventProcessor.java new file mode 100644 index 0000000000..021481c54a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/statistics/export/processor/ExportEventProcessor.java @@ -0,0 +1,258 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export.processor; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.codec.CharEncoding; +import org.apache.commons.lang3.StringUtils; +import org.apache.log4j.Logger; +import org.dspace.content.DCDate; +import org.dspace.content.Entity; +import org.dspace.content.EntityType; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.EntityService; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.core.Utils; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.statistics.export.factory.OpenURLTrackerLoggerServiceFactory; +import org.dspace.statistics.export.service.OpenUrlService; + +/** + * Abstract export event processor that contains all shared logic to handle both Items and Bitstreams + * from the IrusExportUsageEventListener + */ +public abstract class ExportEventProcessor { + + private static Logger log = Logger.getLogger(ExportEventProcessor.class); + + protected static final String ENTITY_TYPE_DEFAULT = "Publication"; + + protected static final String ITEM_VIEW = "Investigation"; + protected static final String BITSTREAM_DOWNLOAD = "Request"; + + protected final static String UTF_8 = CharEncoding.UTF_8; + + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + private EntityService entityService = ContentServiceFactory.getInstance().getEntityService(); + private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + private OpenUrlService openUrlService = OpenURLTrackerLoggerServiceFactory.getInstance().getOpenUrlService(); + + + private Context context; + private HttpServletRequest request; + + /** + * Creates a new ExportEventProcessor based on the params and 
initializes the services + * + * @param context + * @param request + */ + ExportEventProcessor(Context context, HttpServletRequest request) { + this.context = context; + this.request = request; + } + + /** + * Processes the event + * + * @throws SQLException + * @throws IOException + */ + public abstract void processEvent() throws SQLException, IOException; + + /** + * Process the url obtained from the object to be transmitted + * + * @param urlParameters + * @throws IOException + * @throws SQLException + */ + protected void processObject(String urlParameters) throws IOException, SQLException { + String baseUrl; + if (StringUtils.equals(configurationService.getProperty("irus.statistics.tracker.environment"), "production")) { + baseUrl = configurationService.getProperty("irus.statistics.tracker.produrl"); + } else { + baseUrl = configurationService.getProperty("irus.statistics.tracker.testurl"); + } + + openUrlService.processUrl(context, baseUrl + "?" + urlParameters); + } + + /** + * Get the base parameters for the url to be transmitted + * + * @param item + * @return the parameter string to be used in the url + * @throws UnsupportedEncodingException + */ + protected String getBaseParameters(Item item) + throws UnsupportedEncodingException { + + //We have a valid url collect the rest of the data + String clientIP = request.getRemoteAddr(); + if (configurationService.getBooleanProperty("useProxies", false) && request + .getHeader("X-Forwarded-For") != null) { + /* This header is a comma delimited list */ + for (String xfip : request.getHeader("X-Forwarded-For").split(",")) { + /* proxy itself will sometime populate this header with the same value in + remote address. ordering in spec is vague, we'll just take the last + not equal to the proxy + */ + if (!request.getHeader("X-Forwarded-For").contains(clientIP)) { + clientIP = xfip.trim(); + } + } + } + String clientUA = StringUtils.defaultIfBlank(request.getHeader("USER-AGENT"), ""); + String referer = StringUtils.defaultIfBlank(request.getHeader("referer"), ""); + + //Start adding our data + StringBuilder data = new StringBuilder(); + data.append(URLEncoder.encode("url_ver", UTF_8) + "=" + + URLEncoder.encode(configurationService.getProperty("irus.statistics.tracker.urlversion"), UTF_8)); + data.append("&").append(URLEncoder.encode("req_id", UTF_8)).append("=") + .append(URLEncoder.encode(clientIP, UTF_8)); + data.append("&").append(URLEncoder.encode("req_dat", UTF_8)).append("=") + .append(URLEncoder.encode(clientUA, UTF_8)); + + String hostName = Utils.getHostName(configurationService.getProperty("dspace.ui.url")); + + data.append("&").append(URLEncoder.encode("rft.artnum", UTF_8)).append("="). 
+ append(URLEncoder.encode("oai:" + hostName + ":" + item + .getHandle(), UTF_8)); + data.append("&").append(URLEncoder.encode("rfr_dat", UTF_8)).append("=") + .append(URLEncoder.encode(referer, UTF_8)); + data.append("&").append(URLEncoder.encode("rfr_id", UTF_8)).append("=") + .append(URLEncoder.encode(hostName, UTF_8)); + data.append("&").append(URLEncoder.encode("url_tim", UTF_8)).append("=") + .append(URLEncoder.encode(getCurrentDateString(), UTF_8)); + + return data.toString(); + } + + /** + * Get the current date + * + * @return the current date as a string + */ + protected String getCurrentDateString() { + return new DCDate(new Date()).toString(); + } + + /** + * Checks if an item should be processed + * + * @param item to be checked + * @return whether the item should be processed + * @throws SQLException + */ + protected boolean shouldProcessItem(Item item) throws SQLException { + if (item == null) { + return false; + } + if (!item.isArchived()) { + return false; + } + if (itemService.canEdit(context, item)) { + return false; + } + if (!shouldProcessItemType(item)) { + return false; + } + if (!shouldProcessEntityType(item)) { + return false; + } + return true; + } + + /** + * Checks if the item's entity type should be processed + * + * @param item to be checked + * @return whether the item should be processed + * @throws SQLException + */ + protected boolean shouldProcessEntityType(Item item) throws SQLException { + Entity entity = entityService.findByItemId(context, item.getID()); + EntityType type = entityService.getType(context, entity); + + String[] entityTypeStrings = configurationService.getArrayProperty("irus.statistics.tracker.entity-types"); + List<String> entityTypes = new ArrayList<>(); + + if (entityTypeStrings.length != 0) { + entityTypes.addAll(Arrays.asList(entityTypeStrings)); + } else { + entityTypes.add(ENTITY_TYPE_DEFAULT); + } + + if (type != null && entityTypes.contains(type.getLabel())) { + return true; + } + return false; + } + + /** + * Checks if the item should be excluded based on its type + * + * @param item to be checked + * @return whether the item should be processed + */ + protected boolean shouldProcessItemType(Item item) { + String trackerTypeMetadataField = configurationService.getProperty("irus.statistics.tracker.type-field"); + String[] metadataValues = configurationService.getArrayProperty("irus.statistics.tracker.type-value"); + List<String> trackerTypeMetadataValues; + if (metadataValues.length > 0) { + trackerTypeMetadataValues = new ArrayList<>(); + for (String metadataValue : metadataValues) { + trackerTypeMetadataValues.add(metadataValue.toLowerCase()); + } + } else { + trackerTypeMetadataValues = null; + } + + if (trackerTypeMetadataField != null && trackerTypeMetadataValues != null) { + + // Contains the schema, element and if present qualifier of the metadataField + String[] metadataFieldSplit = trackerTypeMetadataField.split("\\."); + + List<MetadataValue> types = itemService + .getMetadata(item, metadataFieldSplit[0], metadataFieldSplit[1], + metadataFieldSplit.length == 2 ?
null : metadataFieldSplit[2], Item.ANY); + + if (!types.isEmpty()) { + //Find out if we have a type that needs to be excluded + for (MetadataValue type : types) { + if (trackerTypeMetadataValues.contains(type.getValue().toLowerCase())) { + //The item has one of the excluded types, so do not process it + return false; + } + } + return true; + } else { + // No types in this item, so not excluded + return true; + } + } else { + // No types to be excluded + return true; + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/processor/ItemEventProcessor.java b/dspace-api/src/main/java/org/dspace/statistics/export/processor/ItemEventProcessor.java new file mode 100644 index 0000000000..507ca92382 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/statistics/export/processor/ItemEventProcessor.java @@ -0,0 +1,91 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export.processor; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; +import java.sql.SQLException; +import javax.servlet.http.HttpServletRequest; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; + + +/** + * Processor that handles Item events from the IrusExportUsageEventListener + */ +public class ItemEventProcessor extends ExportEventProcessor { + + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + + private Item item; + + /** + * Creates a new ItemEventProcessor that will set the params + * + * @param context + * @param request + * @param item + */ + public ItemEventProcessor(Context context, HttpServletRequest request, Item item) { + super(context, request); + this.item = item; + } + + /** + * Process the event + * Check if the item should be processed + * Create the url to be transmitted based on item data + * + * @throws SQLException + * @throws IOException + */ + public void processEvent() throws SQLException, IOException { + if (shouldProcessItem(item)) { + String baseParam = getBaseParameters(item); + String fullParam = addObjectSpecificData(baseParam, item); + processObject(fullParam); + } + } + + /** + * Adds additional item data to the url + * + * @param string to which the additional data needs to be added + * @param item + * @return the string with additional data + * @throws UnsupportedEncodingException + */ + protected String addObjectSpecificData(final String string, Item item) throws UnsupportedEncodingException { + StringBuilder data = new StringBuilder(string); + String itemInfo = getItemInfo(item); + data.append("&").append(URLEncoder.encode("svc_dat", UTF_8)).append("=") + .append(URLEncoder.encode(itemInfo, UTF_8)); + data.append("&").append(URLEncoder.encode("rft_dat", UTF_8)).append("=") + .append(URLEncoder.encode(ITEM_VIEW, UTF_8)); + return data.toString(); + } + + /** + * Get Item info used for the url + * + * @param item + * @return item info + */ + private String getItemInfo(final Item item) { + StringBuilder sb = new StringBuilder(configurationService.getProperty("dspace.ui.url")); + sb.append("/handle/").append(item.getHandle()); + + return sb.toString(); + } + + +} diff --git
a/dspace-api/src/main/java/org/dspace/statistics/export/service/FailedOpenURLTrackerService.java b/dspace-api/src/main/java/org/dspace/statistics/export/service/FailedOpenURLTrackerService.java new file mode 100644 index 0000000000..9b482e3d54 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/statistics/export/service/FailedOpenURLTrackerService.java @@ -0,0 +1,44 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export.service; + +import java.sql.SQLException; +import java.util.List; + +import org.dspace.core.Context; +import org.dspace.statistics.export.OpenURLTracker; + +/** + * Interface of the service that handles the OpenURLTracker database operations + */ +public interface FailedOpenURLTrackerService { + + /** + * Removes an OpenURLTracker from the database + * @param context + * @param openURLTracker + * @throws SQLException + */ + void remove(Context context, OpenURLTracker openURLTracker) throws SQLException; + + /** + * Returns all OpenURLTrackers from the database + * @param context + * @return all OpenURLTrackers + * @throws SQLException + */ + List<OpenURLTracker> findAll(Context context) throws SQLException; + + /** + * Creates a new OpenURLTracker + * @param context + * @return the created OpenURLTracker + * @throws SQLException + */ + OpenURLTracker create(Context context) throws SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/service/OpenUrlService.java b/dspace-api/src/main/java/org/dspace/statistics/export/service/OpenUrlService.java new file mode 100644 index 0000000000..881cfb62d3 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/statistics/export/service/OpenUrlService.java @@ -0,0 +1,44 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export.service; + +import java.io.IOException; +import java.sql.SQLException; + +import org.dspace.core.Context; + +/** + * The Service responsible for processing urls + */ +public interface OpenUrlService { + /** + * Process the url + * @param c - the context + * @param urlStr - the url to be processed + * @throws IOException + * @throws SQLException + */ + void processUrl(Context c, String urlStr) throws SQLException; + + /** + * Will process all urls stored in the database and try contacting them again + * @param context + * @throws SQLException + */ + void reprocessFailedQueue(Context context) throws SQLException; + + /** + * Will log the failed url in the database + * @param context + * @param url + * @throws SQLException + */ + void logfailed(Context context, String url) throws SQLException; + + +} diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/service/OpenUrlServiceImpl.java b/dspace-api/src/main/java/org/dspace/statistics/export/service/OpenUrlServiceImpl.java new file mode 100644 index 0000000000..8555bb0986 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/statistics/export/service/OpenUrlServiceImpl.java @@ -0,0 +1,139 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package
org.dspace.statistics.export.service; + +import java.io.IOException; +import java.net.HttpURLConnection; +import java.net.URL; +import java.net.URLConnection; +import java.sql.SQLException; +import java.util.Date; +import java.util.List; + +import org.apache.commons.lang.StringUtils; +import org.apache.log4j.Logger; +import org.dspace.core.Context; +import org.dspace.statistics.export.OpenURLTracker; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of the OpenUrlService interface + */ +public class OpenUrlServiceImpl implements OpenUrlService { + + private Logger log = Logger.getLogger(OpenUrlService.class); + + @Autowired + protected FailedOpenURLTrackerService failedOpenUrlTrackerService; + + /** + * Processes the url + * When contacting the url fails, the url will be logged in a db table + * @param c - the context + * @param urlStr - the url to be processed + * @throws SQLException + */ + public void processUrl(Context c, String urlStr) throws SQLException { + log.debug("Prepared to send url to tracker URL: " + urlStr); + + try { + int responseCode = getResponseCodeFromUrl(urlStr); + if (responseCode != HttpURLConnection.HTTP_OK) { + logfailed(c, urlStr); + } else if (log.isDebugEnabled()) { + log.debug("Successfully posted " + urlStr + " on " + new Date()); + } + } catch (Exception e) { + log.error("Failed to send url to tracker URL: " + urlStr); + logfailed(c, urlStr); + } + } + + /** + * Returns the response code from accessing the url + * @param urlStr + * @return response code from the url + * @throws IOException + */ + protected int getResponseCodeFromUrl(final String urlStr) throws IOException { + URLConnection conn; + URL url = new URL(urlStr); + conn = url.openConnection(); + + HttpURLConnection httpURLConnection = (HttpURLConnection) conn; + int responseCode = httpURLConnection.getResponseCode(); + httpURLConnection.disconnect(); + + return responseCode; + } + + /** + * Retries sending a failed url + * @param context + * @param tracker - db object containing the failed url + * @throws SQLException + */ + protected void tryReprocessFailed(Context context, OpenURLTracker tracker) throws SQLException { + boolean success = false; + try { + + int responseCode = getResponseCodeFromUrl(tracker.getUrl()); + + if (responseCode == HttpURLConnection.HTTP_OK) { + success = true; + } + } catch (Exception e) { + success = false; + } finally { + if (success) { + failedOpenUrlTrackerService + .remove(context, tracker); + // If the tracker was able to post successfully, we remove it from the database + log.info("Successfully posted " + tracker.getUrl() + " from " + tracker.getUploadDate()); + } else { + // Still no luck - write an error msg but keep the entry in the table for future executions + log.error("Failed attempt from " + tracker.getUrl() + " originating from " + tracker.getUploadDate()); + } + } + } + + /** + * Reprocess all url trackers present in the database + * @param context + * @throws SQLException + */ + public void reprocessFailedQueue(Context context) throws SQLException { + if (failedOpenUrlTrackerService == null) { + log.error("Error retrieving the \"failedOpenUrlTrackerService\" instance, aborting the processing"); + return; + } + List<OpenURLTracker> openURLTrackers = failedOpenUrlTrackerService.findAll(context); + for (OpenURLTracker openURLTracker : openURLTrackers) { + tryReprocessFailed(context, openURLTracker); + } + } + + /** + * Log a failed url in the database + * @param context + * @param url + * @throws SQLException + */ + public void
logfailed(Context context, String url) throws SQLException { + Date now = new Date(); + if (StringUtils.isBlank(url)) { + return; + } + + OpenURLTracker tracker = failedOpenUrlTrackerService.create(context); + tracker.setUploadDate(now); + tracker.setUrl(url); + } + + +} diff --git a/dspace-api/src/main/java/org/dspace/statistics/service/SolrLoggerService.java b/dspace-api/src/main/java/org/dspace/statistics/service/SolrLoggerService.java index 53c94f2668..5db2d9f7df 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/service/SolrLoggerService.java +++ b/dspace-api/src/main/java/org/dspace/statistics/service/SolrLoggerService.java @@ -116,7 +116,7 @@ public interface SolrLoggerService { List fieldNames, List> fieldValuesList) throws SolrServerException, IOException; - public void query(String query, int max) + public void query(String query, int max, int facetMinCount) throws SolrServerException, IOException; /** @@ -130,13 +130,14 @@ public interface SolrLoggerService { * @param showTotal a boolean determining whether the total amount should be given * back as the last element of the array * @param facetQueries list of facet queries + * @param facetMinCount Minimum count of results facet must have to return a result * @return an array containing our results * @throws SolrServerException Exception from the Solr server to the solrj Java client. * @throws java.io.IOException passed through. */ public ObjectCount[] queryFacetField(String query, String filterQuery, String facetField, int max, boolean showTotal, - List facetQueries) + List facetQueries, int facetMinCount) throws SolrServerException, IOException; /** @@ -154,25 +155,27 @@ public interface SolrLoggerService { * @param showTotal a boolean determining whether the total amount should be given * back as the last element of the array * @param context The relevant DSpace Context. + * @param facetMinCount Minimum count of results facet must have to return a result * @return and array containing our results * @throws SolrServerException Exception from the Solr server to the solrj Java client. * @throws java.io.IOException passed through. 
*/ public ObjectCount[] queryFacetDate(String query, String filterQuery, int max, String dateType, String dateStart, - String dateEnd, boolean showTotal, Context context) + String dateEnd, boolean showTotal, Context context, int facetMinCount) throws SolrServerException, IOException; - public Map queryFacetQuery(String query, - String filterQuery, List facetQueries) + public Map queryFacetQuery(String query, String filterQuery, List facetQueries, + int facetMinCount) throws SolrServerException, IOException; - public ObjectCount queryTotal(String query, String filterQuery) + public ObjectCount queryTotal(String query, String filterQuery, int facetMinCount) throws SolrServerException, IOException; public QueryResponse query(String query, String filterQuery, String facetField, int rows, int max, String dateType, String dateStart, - String dateEnd, List facetQueries, String sort, boolean ascending) + String dateEnd, List facetQueries, String sort, boolean ascending, + int facetMinCount) throws SolrServerException, IOException; /** diff --git a/dspace-api/src/main/java/org/dspace/statistics/util/LocationUtils.java b/dspace-api/src/main/java/org/dspace/statistics/util/LocationUtils.java index 0b08085f52..073dc45551 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/util/LocationUtils.java +++ b/dspace-api/src/main/java/org/dspace/statistics/util/LocationUtils.java @@ -8,7 +8,9 @@ package org.dspace.statistics.util; import java.io.IOException; +import java.util.HashMap; import java.util.Locale; +import java.util.Map; import java.util.MissingResourceException; import java.util.Properties; import java.util.ResourceBundle; @@ -34,7 +36,8 @@ public class LocationUtils { /** * Default constructor */ - private LocationUtils() { } + private LocationUtils() { + } /** * Map DSpace continent codes onto ISO country codes. @@ -53,7 +56,7 @@ public class LocationUtils { if (countryToContinent.isEmpty()) { try { countryToContinent.load(LocationUtils.class - .getResourceAsStream("country-continent-codes.properties")); + .getResourceAsStream("country-continent-codes.properties")); } catch (IOException e) { logger.error("Could not load country/continent map file", e); } @@ -105,7 +108,7 @@ public class LocationUtils { names = ResourceBundle.getBundle(CONTINENT_NAMES_BUNDLE, locale); } catch (MissingResourceException e) { logger.error("Could not load continent code/name resource bundle", - e); + e); return I18nUtil .getMessage("org.dspace.statistics.util.LocationUtils.unknown-continent"); } @@ -115,7 +118,7 @@ public class LocationUtils { name = names.getString(continentCode); } catch (MissingResourceException e) { logger.info("No continent code " + continentCode + " in bundle " - + names.getLocale().getDisplayName()); + + names.getLocale().getDisplayName()); return I18nUtil .getMessage("org.dspace.statistics.util.LocationUtils.unknown-continent"); } @@ -134,6 +137,36 @@ public class LocationUtils { return getCountryName(countryCode, Locale.getDefault()); } + /** + * Revert a country name back into a country code (iso2) + * Source: https://stackoverflow.com/a/38588988 + * + * @param countryName Name of country (according to Locale) + * @return Corresponding iso2 country code + */ + static public String getCountryCode(String countryName) { + // Get all country codes in a string array. 
+ String[] isoCountryCodes = Locale.getISOCountries(); + Map countryMap = new HashMap<>(); + Locale locale; + String name; + + // Iterate through all country codes: + for (String code : isoCountryCodes) { + // Create a locale using each country code + locale = new Locale("", code); + // Get country name for each code. + name = locale.getDisplayCountry(); + // Map all country names and codes in key - value pairs. + countryMap.put(name, code); + } + + // Return the country code for the given country name using the map. + // Here you will need some validation or better yet + // a list of countries to give to user to choose from. + return countryMap.get(countryName); // "NL" for Netherlands. + } + /** * Map ISO country codes onto localized country names. * diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseRegistryUpdater.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseRegistryUpdater.java index c9e126f8fa..74653d8996 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseRegistryUpdater.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseRegistryUpdater.java @@ -9,7 +9,6 @@ package org.dspace.storage.rdbms; import java.io.File; import java.io.IOException; -import java.sql.Connection; import java.sql.SQLException; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; @@ -24,8 +23,8 @@ import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.workflow.factory.WorkflowServiceFactory; import org.dspace.xmlworkflow.service.XmlWorkflowService; -import org.flywaydb.core.api.MigrationInfo; -import org.flywaydb.core.api.callback.FlywayCallback; +import org.flywaydb.core.api.callback.Callback; +import org.flywaydb.core.api.callback.Event; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xml.sax.SAXException; @@ -50,7 +49,7 @@ import org.xml.sax.SAXException; * * @author Tim Donohue */ -public class DatabaseRegistryUpdater implements FlywayCallback { +public class DatabaseRegistryUpdater implements Callback { /** * logging category */ @@ -107,73 +106,38 @@ public class DatabaseRegistryUpdater implements FlywayCallback { } } + + /** + * Events supported by this callback. + * @param event Flyway event + * @param context Flyway context + * @return true if AFTER_MIGRATE event + */ @Override - public void beforeClean(Connection connection) { - - } - - @Override - public void afterClean(Connection connection) { - - } - - @Override - public void beforeMigrate(Connection connection) { - - } - - @Override - public void afterMigrate(Connection connection) { + public boolean supports(Event event, org.flywaydb.core.api.callback.Context context) { // Must run AFTER all migrations complete, since it is dependent on Hibernate + return event.equals(Event.AFTER_MIGRATE); + } + + /** + * Whether event can be handled in a transaction or whether it must be handle outside of transaction. + * @param event Flyway event + * @param context Flyway context + * @return true + */ + @Override + public boolean canHandleInTransaction(Event event, org.flywaydb.core.api.callback.Context context) { + // Always return true, as our handle() method is updating the database. + return true; + } + + /** + * What to run when the callback is triggered. 
+ * @param event Flyway event + * @param context Flyway context + */ + @Override + public void handle(Event event, org.flywaydb.core.api.callback.Context context) { updateRegistries(); } - - @Override - public void beforeEachMigrate(Connection connection, MigrationInfo migrationInfo) { - - } - - @Override - public void afterEachMigrate(Connection connection, MigrationInfo migrationInfo) { - - } - - @Override - public void beforeValidate(Connection connection) { - - } - - @Override - public void afterValidate(Connection connection) { - - } - - @Override - public void beforeBaseline(Connection connection) { - - } - - @Override - public void afterBaseline(Connection connection) { - - } - - @Override - public void beforeRepair(Connection connection) { - - } - - @Override - public void afterRepair(Connection connection) { - - } - - @Override - public void beforeInfo(Connection connection) { - - } - - @Override - public void afterInfo(Connection connection) { - } } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java index cadd3eee52..4432949a85 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java @@ -36,11 +36,13 @@ import org.dspace.workflow.factory.WorkflowServiceFactory; import org.flywaydb.core.Flyway; import org.flywaydb.core.api.FlywayException; import org.flywaydb.core.api.MigrationInfo; -import org.flywaydb.core.api.callback.FlywayCallback; -import org.flywaydb.core.internal.dbsupport.DbSupport; -import org.flywaydb.core.internal.dbsupport.DbSupportFactory; -import org.flywaydb.core.internal.dbsupport.SqlScript; +import org.flywaydb.core.api.callback.Callback; +import org.flywaydb.core.api.configuration.FluentConfiguration; import org.flywaydb.core.internal.info.MigrationInfoDumper; +import org.flywaydb.core.internal.license.VersionPrinter; +import org.springframework.dao.DataAccessException; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.datasource.SingleConnectionDataSource; /** * Utility class used to manage the Database. This class is used by the @@ -58,9 +60,6 @@ public class DatabaseUtils { */ private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(DatabaseUtils.class); - // Our Flyway DB object (initialized by setupFlyway()) - private static Flyway flywaydb; - // When this temp file exists, the "checkReindexDiscovery()" method will auto-reindex Discovery // Reindex flag file is at [dspace]/solr/search/conf/reindex.flag // See also setReindexDiscovery()/getReindexDiscover() @@ -76,6 +75,9 @@ public class DatabaseUtils { public static final String DBMS_ORACLE = "oracle"; public static final String DBMS_H2 = "h2"; + // Name of the table that Flyway uses for its migration history + public static final String FLYWAY_TABLE = "schema_version"; + /** * Default constructor */ @@ -100,8 +102,13 @@ public class DatabaseUtils { // Get a reference to our configured DataSource DataSource dataSource = getDataSource(); - // Point Flyway API to our database - Flyway flyway = setupFlyway(dataSource); + // Initialize Flyway against our database + FluentConfiguration flywayConfiguration = setupFlyway(dataSource); + Flyway flyway = flywayConfiguration.load(); + + // Now, check our Flyway database table to see if it needs upgrading + // *before* any other Flyway commands can be run. This is a safety check. 
+ FlywayUpgradeUtils.upgradeFlywayTable(flyway, dataSource.getConnection()); // "test" = Test Database Connection if (argv[0].equalsIgnoreCase("test")) { @@ -140,7 +147,7 @@ public class DatabaseUtils { // If Flyway is NOT yet initialized, also print the determined version information // NOTE: search is case sensitive, as flyway table name is ALWAYS lowercase, // See: http://flywaydb.org/documentation/faq.html#case-sensitive - if (!tableExists(connection, flyway.getTable(), true)) { + if (!tableExists(connection, flyway.getConfiguration().getTable(), true)) { System.out .println("\nNOTE: This database is NOT yet initialized for auto-migrations (via Flyway)."); // Determine which version of DSpace this looks like @@ -265,7 +272,7 @@ public class DatabaseUtils { // "clean" = Run Flyway clean script // If clean is disabled, return immediately - if (flyway.isCleanDisabled()) { + if (flyway.getConfiguration().isCleanDisabled()) { System.out.println( "\nWARNING: 'clean' command is currently disabled, as it is dangerous to run in Production " + "scenarios!"); @@ -413,6 +420,8 @@ public class DatabaseUtils { "PostgreSQL '" + PostgresUtils.PGCRYPTO + "' extension installed/up-to-date? " + pgcryptoUpToDate + "" + " " + ((pgcryptoVersion != null) ? "(version=" + pgcryptoVersion + ")" : "(not installed)")); } + // Finally, print out our version of Flyway + System.out.println("FlywayDB Version: " + VersionPrinter.getVersion()); } /** @@ -505,70 +514,79 @@ public class DatabaseUtils { } /** - * Setup/Initialize the Flyway API to run against our DSpace database + * Setup/Initialize the Flyway Configuration to run against our DSpace database * and point at our migration scripts. * * @param datasource DataSource object initialized by DatabaseManager - * @return initialized Flyway object + * @return initialized FluentConfiguration (Flyway configuration object) */ - private synchronized static Flyway setupFlyway(DataSource datasource) { + private synchronized static FluentConfiguration setupFlyway(DataSource datasource) { ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService(); - if (flywaydb == null) { - try (Connection connection = datasource.getConnection()) { - // Initialize Flyway DB API (http://flywaydb.org/), used to perform DB migrations - flywaydb = new Flyway(); - flywaydb.setDataSource(datasource); - flywaydb.setEncoding("UTF-8"); + // Initialize Flyway Configuration (http://flywaydb.org/), used to perform DB migrations + FluentConfiguration flywayConfiguration = Flyway.configure(); - // Default cleanDisabled to "true" (which disallows the ability to run 'database clean') - flywaydb.setCleanDisabled(config.getBooleanProperty("db.cleanDisabled", true)); + try (Connection connection = datasource.getConnection()) { + flywayConfiguration.dataSource(datasource); + flywayConfiguration.encoding("UTF-8"); - // Migration scripts are based on DBMS Keyword (see full path below) - String dbType = getDbType(connection); - connection.close(); + // Default cleanDisabled to "true" (which disallows the ability to run 'database clean') + flywayConfiguration.cleanDisabled(config.getBooleanProperty("db.cleanDisabled", true)); - // Determine location(s) where Flyway will load all DB migrations - ArrayList scriptLocations = new ArrayList(); + // Migration scripts are based on DBMS Keyword (see full path below) + String dbType = getDbType(connection); + connection.close(); - // First, add location for custom SQL migrations, if any (based on DB Type) - // e.g. 
[dspace.dir]/etc/[dbtype]/ - // (We skip this for H2 as it's only used for unit testing) - if (!dbType.equals(DBMS_H2)) { - scriptLocations.add("filesystem:" + config.getProperty("dspace.dir") + - "/etc/" + dbType); - } + // Determine location(s) where Flyway will load all DB migrations + ArrayList scriptLocations = new ArrayList<>(); - // Also add the Java package where Flyway will load SQL migrations from (based on DB Type) - scriptLocations.add("classpath:org.dspace.storage.rdbms.sqlmigration." + dbType); - - // Also add the Java package where Flyway will load Java migrations from - // NOTE: this also loads migrations from any sub-package - scriptLocations.add("classpath:org.dspace.storage.rdbms.migration"); - - //Add all potential workflow migration paths - List workflowFlywayMigrationLocations = WorkflowServiceFactory.getInstance() - .getWorkflowService() - .getFlywayMigrationLocations(); - scriptLocations.addAll(workflowFlywayMigrationLocations); - - // Now tell Flyway which locations to load SQL / Java migrations from - log.info("Loading Flyway DB migrations from: " + StringUtils.join(scriptLocations, ", ")); - flywaydb.setLocations(scriptLocations.toArray(new String[scriptLocations.size()])); - - // Set flyway callbacks (i.e. classes which are called post-DB migration and similar) - // In this situation, we have a Registry Updater that runs PRE-migration - // NOTE: DatabaseLegacyReindexer only indexes in Legacy Lucene & RDBMS indexes. It can be removed - // once those are obsolete. - List flywayCallbacks = DSpaceServicesFactory.getInstance().getServiceManager() - .getServicesByType(FlywayCallback.class); - flywaydb.setCallbacks(flywayCallbacks.toArray(new FlywayCallback[flywayCallbacks.size()])); - } catch (SQLException e) { - log.error("Unable to setup Flyway against DSpace database", e); + // First, add location for custom SQL migrations, if exists (based on DB Type) + // e.g. [dspace.dir]/etc/[dbtype]/ + // (We skip this for H2 as it's only used for unit testing) + String etcDirPath = config.getProperty("dspace.dir") + "/etc/" + dbType; + File etcDir = new File(etcDirPath); + if (etcDir.exists() && !dbType.equals(DBMS_H2)) { + scriptLocations.add("filesystem:" + etcDirPath); } + + // Also add the Java package where Flyway will load SQL migrations from (based on DB Type) + scriptLocations.add("classpath:org/dspace/storage/rdbms/sqlmigration/" + dbType); + + // Also add the Java package where Flyway will load Java migrations from + // NOTE: this also loads migrations from any sub-package + scriptLocations.add("classpath:org/dspace/storage/rdbms/migration"); + + //Add all potential workflow migration paths + List workflowFlywayMigrationLocations = WorkflowServiceFactory.getInstance() + .getWorkflowService() + .getFlywayMigrationLocations(); + scriptLocations.addAll(workflowFlywayMigrationLocations); + + // Now tell Flyway which locations to load SQL / Java migrations from + log.info("Loading Flyway DB migrations from: " + StringUtils.join(scriptLocations, ", ")); + flywayConfiguration.locations(scriptLocations.toArray(new String[scriptLocations.size()])); + + // Tell Flyway NOT to throw a validation error if it finds older "Ignored" migrations. + // For DSpace, we sometimes have to insert "old" migrations in after a major release + // if further development/bug fixes are needed in older versions. 
So, "Ignored" migrations are + // nothing to worry about...you can always trigger them to run using "database migrate ignored" from CLI + flywayConfiguration.ignoreIgnoredMigrations(true); + + // Set Flyway callbacks (i.e. classes which are called post-DB migration and similar) + List flywayCallbacks = DSpaceServicesFactory.getInstance().getServiceManager() + .getServicesByType(Callback.class); + + flywayConfiguration.callbacks(flywayCallbacks.toArray(new Callback[flywayCallbacks.size()])); + + // Tell Flyway to use the "schema_version" table in the database to manage its migration history + // As of Flyway v5, the default table is named "flyway_schema_history" + // We are using the older name ("schema_version") for backwards compatibility. + flywayConfiguration.table(FLYWAY_TABLE); + } catch (SQLException e) { + log.error("Unable to setup Flyway against DSpace database", e); } - return flywaydb; + return flywayConfiguration; } /** @@ -645,36 +663,48 @@ public class DatabaseUtils { try { // Setup Flyway API against our database - Flyway flyway = setupFlyway(datasource); + FluentConfiguration flywayConfiguration = setupFlyway(datasource); - // Set whethe Flyway will run migrations "out of order". By default, this is false, + // Set whether Flyway will run migrations "out of order". By default, this is false, // and Flyway ONLY runs migrations that have a higher version number. - flyway.setOutOfOrder(outOfOrder); + flywayConfiguration.outOfOrder(outOfOrder); // If a target version was specified, tell Flyway to ONLY migrate to that version // (i.e. all later migrations are left as "pending"). By default we always migrate to latest version. if (!StringUtils.isBlank(targetVersion)) { - flyway.setTargetAsString(targetVersion); + flywayConfiguration.target(targetVersion); } + // Initialized Flyway object (will be created by flywayConfiguration.load() below) + Flyway flyway; + // Does the necessary Flyway table ("schema_version") exist in this database? // If not, then this is the first time Flyway has run, and we need to initialize // NOTE: search is case sensitive, as flyway table name is ALWAYS lowercase, // See: http://flywaydb.org/documentation/faq.html#case-sensitive - if (!tableExists(connection, flyway.getTable(), true)) { + if (!tableExists(connection, flywayConfiguration.getTable(), true)) { // Try to determine our DSpace database version, so we know what to tell Flyway to do - String dbVersion = determineDBVersion(connection); + String dspaceVersion = determineDBVersion(connection); - // If this is a fresh install, dbVersion will be null - if (dbVersion == null) { - // Initialize the Flyway database table with defaults (version=1) - flyway.baseline(); - } else { - // Otherwise, pass our determined DB version to Flyway to initialize database table - flyway.setBaselineVersionAsString(dbVersion); - flyway.setBaselineDescription("Initializing from DSpace " + dbVersion + " database schema"); - flyway.baseline(); + // If this is NOT a fresh install (i.e. 
dspaceVersion is not null) + if (dspaceVersion != null) { + // Pass our determined DSpace version to Flyway to initialize database table + flywayConfiguration.baselineVersion(dspaceVersion); + flywayConfiguration.baselineDescription( + "Initializing from DSpace " + dspaceVersion + " database schema"); } + + // Initialize Flyway in DB with baseline version (either dspaceVersion or default of 1) + flyway = flywayConfiguration.load(); + flyway.baseline(); + } else { + // Otherwise, this database already ran Flyway before + // So, just load our Flyway configuration, initializing latest Flyway. + flyway = flywayConfiguration.load(); + + // Now, check our Flyway database table to see if it needs upgrading + // *before* any other Flyway commands can be run. + FlywayUpgradeUtils.upgradeFlywayTable(flyway, connection); } // Determine pending Database migrations @@ -1049,16 +1079,13 @@ public class DatabaseUtils { */ public static void executeSql(Connection connection, String sqlToExecute) throws SQLException { try { - // Create a Flyway DbSupport object (based on our connection) - // This is how Flyway determines the database *type* (e.g. Postgres vs Oracle) - DbSupport dbSupport = DbSupportFactory.createDbSupport(connection, false); - - // Load our SQL string & execute via Flyway's SQL parser - SqlScript script = new SqlScript(sqlToExecute, dbSupport); - script.execute(dbSupport.getJdbcTemplate()); - } catch (FlywayException fe) { - // If any FlywayException (Runtime) is thrown, change it to a SQLException - throw new SQLException("Flyway executeSql() error occurred", fe); + // Run the SQL using Spring JDBC as documented in Flyway's guide for using Spring JDBC directly + // https://flywaydb.org/documentation/migrations#spring + new JdbcTemplate(new SingleConnectionDataSource(connection, true)) + .execute(sqlToExecute); + } catch (DataAccessException dae) { + // If any Exception is thrown, change it to a SQLException + throw new SQLException("Flyway executeSql() error occurred", dae); } } @@ -1329,13 +1356,6 @@ public class DatabaseUtils { return dataSource; } - /** - * In case of a unit test the flyway db is cached to long leading to exceptions, we need to clear the object - */ - public static void clearFlywayDBCache() { - flywaydb = null; - } - /** * Returns the current Flyway schema_version being used by the given database. * (i.e. 
the version of the highest numbered migration that this database has run) @@ -1346,7 +1366,7 @@ public class DatabaseUtils { */ public static String getCurrentFlywayState(Connection connection) throws SQLException { PreparedStatement statement = connection - .prepareStatement("SELECT \"version\" FROM \"schema_version\" ORDER BY \"version\" desc"); + .prepareStatement("SELECT \"version\" FROM \"" + FLYWAY_TABLE + "\" ORDER BY \"version\" desc"); ResultSet resultSet = statement.executeQuery(); resultSet.next(); return resultSet.getString("version"); diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/FlywayUpgradeUtils.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/FlywayUpgradeUtils.java new file mode 100644 index 0000000000..7bd524b612 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/FlywayUpgradeUtils.java @@ -0,0 +1,117 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.storage.rdbms; + +import static org.dspace.storage.rdbms.DatabaseUtils.FLYWAY_TABLE; +import static org.dspace.storage.rdbms.DatabaseUtils.executeSql; +import static org.dspace.storage.rdbms.DatabaseUtils.getCurrentFlywayState; +import static org.dspace.storage.rdbms.DatabaseUtils.getDbType; +import static org.dspace.storage.rdbms.DatabaseUtils.getSchemaName; + +import java.sql.Connection; +import java.sql.SQLException; +import java.util.HashMap; +import java.util.Map; + +import org.apache.commons.text.StringSubstitutor; +import org.apache.logging.log4j.Logger; +import org.dspace.storage.rdbms.migration.MigrationUtils; +import org.flywaydb.core.Flyway; + +/** + * Utility class used to detect issues with the Flyway migration history table and attempt to correct/fix them. + * These issues can occur when attempting to upgrade your database across multiple versions/releases of Flyway. + *
<p>
    + * As documented in this issue ticket, Flyway does not normally support skipping over any + * major release (for example going from v3 to v5 is unsupported): https://github.com/flyway/flyway/issues/2126 + *
<p>
    + * This class allows us to do a migration (where needed) through multiple major versions of Flyway. + * + * @author Tim Donohue + */ +public class FlywayUpgradeUtils { + /** + * log4j category + */ + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(FlywayUpgradeUtils.class); + + // Resource path of all Flyway upgrade scripts + private static final String UPGRADE_SCRIPT_PATH = "org/dspace/storage/rdbms/flywayupgrade/"; + + + /** + * Default constructor + */ + private FlywayUpgradeUtils() { } + + /** + * Ensures the Flyway migration history table (FLYWAY_TABLE) is upgraded to the latest version of Flyway safely. + *
<p>
    + * Unfortunately, Flyway does not always support skipping major versions (e.g. upgrading directly from Flyway + * v3.x to 5.x is not possible, see https://github.com/flyway/flyway/issues/2126). + *
<p>
    + * While sometimes it's possible to do so, other times you MUST upgrade through each major version. This method + * ensures we upgrade the Flyway history table through each version of Flyway where deemed necessary. + * + * @param flyway initialized/configured Flyway object + * @param connection current database connection + */ + protected static synchronized void upgradeFlywayTable(Flyway flyway, Connection connection) + throws SQLException { + // Whether the Flyway table needs updating or not + boolean needsUpgrade = false; + + // Determine if Flyway needs updating by running a simple info() command. + // This command will not run any pending migrations, but it will throw an exception + // if the Flyway migration history table is NOT valid for the current version of Flyway + try { + flyway.info(); + } catch (Exception e) { + // ignore error, but log info statement to say we will try to upgrade to fix problem + log.info("Flyway table '{}' appears to be outdated. Will attempt to upgrade it automatically. " + + "Flyway Exception was '{}'", FLYWAY_TABLE, e.toString()); + needsUpgrade = true; + } + + if (needsUpgrade) { + // Get the DSpace version info from the LAST migration run. + String lastMigration = getCurrentFlywayState(connection); + // If this is an older DSpace 5.x compatible database, then it used Flyway 3.x. + // Because we cannot upgrade directly from Flyway 3.x -> 6.x, we need to FIRST update this + // database to be compatible with Flyway 4.2.0 (which can be upgraded directly to Flyway 6.x) + if (lastMigration.startsWith("5.")) { + // Based on type of DB, get path to our Flyway 4.x upgrade script + String dbtype = getDbType(connection); + String scriptPath = UPGRADE_SCRIPT_PATH + dbtype + "/upgradeToFlyway4x.sql"; + + log.info("Attempting to upgrade Flyway table '{}' using script at '{}'", + FLYWAY_TABLE, scriptPath); + // Load the Flyway v4.2.0 upgrade SQL script as a String + String flywayUpgradeSQL = MigrationUtils.getResourceAsString(scriptPath); + + // As this Flyway upgrade SQL was borrowed from Flyway v4.2.0 directly, it contains some inline + // variables which need replacing, namely ${schema} and ${table} variables. + // We'll use the StringSubstitutor to replace those variables with their proper values. + Map valuesMap = new HashMap<>(); + valuesMap.put("schema", getSchemaName(connection)); + valuesMap.put("table", FLYWAY_TABLE); + StringSubstitutor sub = new StringSubstitutor(valuesMap); + flywayUpgradeSQL = sub.replace(flywayUpgradeSQL); + + // Run the script to update the Flyway table to be compatible with FLyway v4.x + executeSql(connection, flywayUpgradeSQL); + } + // NOTE: no other DSpace versions require a specialized Flyway upgrade script at this time. + // DSpace 4 didn't use Flyway. DSpace 6 used Flyway v4, which Flyway v6 can update automatically. 
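The substitution and execution steps above rely on Apache Commons Text and Spring JDBC. A minimal sketch of that pattern, with the connection, schema name and SQL fragment treated as assumed inputs:

    import java.sql.Connection;
    import java.util.HashMap;
    import java.util.Map;
    import org.apache.commons.text.StringSubstitutor;
    import org.springframework.jdbc.core.JdbcTemplate;
    import org.springframework.jdbc.datasource.SingleConnectionDataSource;

    public class UpgradeScriptSketch {
        // Resolve ${schema}/${table} placeholders in a borrowed Flyway upgrade script, then run it.
        static void runUpgradeScript(Connection connection, String rawSql, String schema) {
            Map<String, String> values = new HashMap<>();
            values.put("schema", schema);            // e.g. "public" on PostgreSQL (assumed)
            values.put("table", "schema_version");   // the history table name DSpace keeps using
            String resolvedSql = new StringSubstitutor(values).replace(rawSql);

            // Execute through Spring JDBC; 'true' suppresses close() so the caller keeps its connection
            new JdbcTemplate(new SingleConnectionDataSource(connection, true)).execute(resolvedSql);
        }
    }

Passing true as the second argument to SingleConnectionDataSource suppresses close(), which is why the caller's connection remains usable after the script runs.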
+ + // After any Flyway table upgrade, we MUST run a Flyway repair() to cleanup migration checksums if needed + log.info("Repairing Flyway table '{}' after upgrade...", FLYWAY_TABLE); + flyway.repair(); + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/GroupServiceInitializer.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/GroupServiceInitializer.java index 11018d37e0..7338dd75bc 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/GroupServiceInitializer.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/GroupServiceInitializer.java @@ -7,13 +7,11 @@ */ package org.dspace.storage.rdbms; -import java.sql.Connection; - import org.apache.logging.log4j.Logger; import org.dspace.core.Context; import org.dspace.eperson.service.GroupService; -import org.flywaydb.core.api.MigrationInfo; -import org.flywaydb.core.api.callback.FlywayCallback; +import org.flywaydb.core.api.callback.Callback; +import org.flywaydb.core.api.callback.Event; import org.springframework.beans.factory.annotation.Autowired; /** @@ -22,7 +20,7 @@ import org.springframework.beans.factory.annotation.Autowired; * * @author kevinvandevelde at atmire.com */ -public class GroupServiceInitializer implements FlywayCallback { +public class GroupServiceInitializer implements Callback { private final Logger log = org.apache.logging.log4j.LogManager.getLogger(GroupServiceInitializer.class); @@ -53,73 +51,36 @@ public class GroupServiceInitializer implements FlywayCallback { } + /** + * Events supported by this callback. + * @param event Flyway event + * @param context Flyway context + * @return true if AFTER_MIGRATE event + */ @Override - public void beforeClean(Connection connection) { - + public boolean supports(Event event, org.flywaydb.core.api.callback.Context context) { + // Must run AFTER all migrations complete, since it is dependent on Hibernate + return event.equals(Event.AFTER_MIGRATE); } + /** + * Whether event can be handled in a transaction or whether it must be handle outside of transaction. + * @param event Flyway event + * @param context Flyway context + * @return true + */ @Override - public void afterClean(Connection connection) { - + public boolean canHandleInTransaction(Event event, org.flywaydb.core.api.callback.Context context) { + return true; } + /** + * What to run when the callback is triggered. 
+ * @param event Flyway event + * @param context Flyway context + */ @Override - public void beforeMigrate(Connection connection) { - - } - - @Override - public void afterMigrate(Connection connection) { + public void handle(Event event, org.flywaydb.core.api.callback.Context context) { initGroups(); } - - @Override - public void beforeEachMigrate(Connection connection, MigrationInfo migrationInfo) { - - } - - @Override - public void afterEachMigrate(Connection connection, MigrationInfo migrationInfo) { - - } - - @Override - public void beforeValidate(Connection connection) { - - } - - @Override - public void afterValidate(Connection connection) { - - } - - @Override - public void beforeBaseline(Connection connection) { - - } - - @Override - public void afterBaseline(Connection connection) { - - } - - @Override - public void beforeRepair(Connection connection) { - - } - - @Override - public void afterRepair(Connection connection) { - - } - - @Override - public void beforeInfo(Connection connection) { - - } - - @Override - public void afterInfo(Connection connection) { - - } } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/PostgreSQLCryptoChecker.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/PostgreSQLCryptoChecker.java index 48f2e4e6f0..5798f4254c 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/PostgreSQLCryptoChecker.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/PostgreSQLCryptoChecker.java @@ -13,8 +13,9 @@ import java.sql.Statement; import org.apache.logging.log4j.Logger; import org.flywaydb.core.api.FlywayException; -import org.flywaydb.core.api.MigrationInfo; -import org.flywaydb.core.api.callback.FlywayCallback; +import org.flywaydb.core.api.callback.Callback; +import org.flywaydb.core.api.callback.Context; +import org.flywaydb.core.api.callback.Event; /** * This is a FlywayCallback class which automatically verifies that "pgcrypto" @@ -28,7 +29,7 @@ import org.flywaydb.core.api.callback.FlywayCallback; * * @author Tim Donohue */ -public class PostgreSQLCryptoChecker implements FlywayCallback { +public class PostgreSQLCryptoChecker implements Callback { private Logger log = org.apache.logging.log4j.LogManager.getLogger(PostgreSQLCryptoChecker.class); /** @@ -96,76 +97,43 @@ public class PostgreSQLCryptoChecker implements FlywayCallback { } } + /** + * Events supported by this callback. + * @param event Flyway event + * @param context Flyway context + * @return true if BEFORE_BASELINE, BEFORE_MIGRATE or BEFORE_CLEAN + */ @Override - public void beforeClean(Connection connection) { - // If pgcrypto is installed, remove it - removePgCrypto(connection); + public boolean supports(Event event, Context context) { + return event.equals(Event.BEFORE_BASELINE) || event.equals(Event.BEFORE_MIGRATE) || + event.equals(Event.BEFORE_CLEAN); } + /** + * Whether event can be handled in a transaction or whether it must be handle outside of transaction. + * @param event Flyway event + * @param context Flyway context + * @return true + */ @Override - public void afterClean(Connection connection) { - + public boolean canHandleInTransaction(Event event, Context context) { + return true; } + /** + * What to run when the callback is triggered. 
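These classes now implement Flyway's event-based Callback interface instead of stubbing out every FlywayCallback method. A minimal sketch of a callback in that style (hypothetical class; some later Flyway 6.x releases also require a getCallbackName() implementation, so the exact method set depends on the Flyway version in use):

    import org.flywaydb.core.api.callback.Callback;
    import org.flywaydb.core.api.callback.Context;
    import org.flywaydb.core.api.callback.Event;

    // Hypothetical callback that only reacts once, after all migrations have completed.
    public class AfterMigrateLogger implements Callback {

        @Override
        public boolean supports(Event event, Context context) {
            return event == Event.AFTER_MIGRATE;
        }

        @Override
        public boolean canHandleInTransaction(Event event, Context context) {
            return true;
        }

        @Override
        public void handle(Event event, Context context) {
            // context.getConnection() is available here if database work is required
            System.out.println("Handled Flyway event: " + event.getId());
        }
    }

Callbacks written this way are picked up via FluentConfiguration.callbacks(...), as done in the DatabaseUtils changes above.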
+ * @param event Flyway event + * @param context Flyway context + */ @Override - public void beforeMigrate(Connection connection) { - // Before migrating database, check for pgcrypto - checkPgCrypto(connection); - } - - @Override - public void afterMigrate(Connection connection) { - - } - - @Override - public void beforeEachMigrate(Connection connection, MigrationInfo migrationInfo) { - - } - - @Override - public void afterEachMigrate(Connection connection, MigrationInfo migrationInfo) { - - } - - @Override - public void beforeValidate(Connection connection) { - - } - - @Override - public void afterValidate(Connection connection) { - - } - - @Override - public void beforeBaseline(Connection connection) { - // Before initializing database, check for pgcrypto - checkPgCrypto(connection); - } - - @Override - public void afterBaseline(Connection connection) { - - } - - @Override - public void beforeRepair(Connection connection) { - - } - - @Override - public void afterRepair(Connection connection) { - - } - - @Override - public void beforeInfo(Connection connection) { - - } - - @Override - public void afterInfo(Connection connection) { + public void handle(Event event, Context context) { + // If, before initializing or migrating database, check for pgcrypto + // Else, before Cleaning database, remove pgcrypto (if exists) + if (event.equals(Event.BEFORE_BASELINE) || event.equals(Event.BEFORE_MIGRATE)) { + checkPgCrypto(context.getConnection()); + } else if (event.equals(Event.BEFORE_CLEAN)) { + removePgCrypto(context.getConnection()); + } } } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/SiteServiceInitializer.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/SiteServiceInitializer.java index a4b7129546..d755150f79 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/SiteServiceInitializer.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/SiteServiceInitializer.java @@ -7,13 +7,16 @@ */ package org.dspace.storage.rdbms; -import java.sql.Connection; - import org.apache.logging.log4j.Logger; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Site; import org.dspace.content.service.SiteService; +import org.dspace.core.Constants; import org.dspace.core.Context; -import org.flywaydb.core.api.MigrationInfo; -import org.flywaydb.core.api.callback.FlywayCallback; +import org.dspace.eperson.Group; +import org.dspace.eperson.service.GroupService; +import org.flywaydb.core.api.callback.Callback; +import org.flywaydb.core.api.callback.Event; import org.springframework.beans.factory.annotation.Autowired; /** @@ -22,29 +25,44 @@ import org.springframework.beans.factory.annotation.Autowired; * * @author kevinvandevelde at atmire.com */ -public class SiteServiceInitializer implements FlywayCallback { +public class SiteServiceInitializer implements Callback { private Logger log = org.apache.logging.log4j.LogManager.getLogger(SiteServiceInitializer.class); @Autowired(required = true) protected SiteService siteService; + @Autowired + private AuthorizeService authorizeService; + + @Autowired + private GroupService groupService; + public void initializeSiteObject() { // After every migrate, ensure default Site is setup correctly. 
Context context = null; try { context = new Context(); context.turnOffAuthorisationSystem(); - // While it's not really a formal "registry", we need to ensure the - // default, required Groups exist in the DSpace database + // Create Site object if it doesn't exist in database + Site site = null; if (siteService.findSite(context) == null) { - siteService.createSite(context); + site = siteService.createSite(context); } context.restoreAuthSystemState(); + // Give Anonymous users READ permissions on the Site Object (if doesn't exist) + if (!authorizeService.authorizeActionBoolean(context, site, Constants.READ)) { + context.turnOffAuthorisationSystem(); + Group anonGroup = groupService.findByName(context, Group.ANONYMOUS); + if (anonGroup != null) { + authorizeService.addPolicy(context, site, Constants.READ, anonGroup); + } + context.restoreAuthSystemState(); + } // Commit changes and close context context.complete(); } catch (Exception e) { - log.error("Error attempting to add/update default DSpace Groups", e); + log.error("Error attempting to add/update default Site object", e); } finally { // Clean up our context, if it still exists & it was never completed if (context != null && context.isValid()) { @@ -55,73 +73,36 @@ public class SiteServiceInitializer implements FlywayCallback { } + /** + * Events supported by this callback. + * @param event Flyway event + * @param context Flyway context + * @return true if AFTER_MIGRATE event + */ @Override - public void beforeClean(Connection connection) { - + public boolean supports(Event event, org.flywaydb.core.api.callback.Context context) { + // Must run AFTER all migrations complete, since it is dependent on Hibernate + return event.equals(Event.AFTER_MIGRATE); } + /** + * Whether event can be handled in a transaction or whether it must be handle outside of transaction. + * @param event Flyway event + * @param context Flyway context + * @return true + */ @Override - public void afterClean(Connection connection) { - + public boolean canHandleInTransaction(Event event, org.flywaydb.core.api.callback.Context context) { + return true; } + /** + * What to run when the callback is triggered. 
+ * @param event Flyway event + * @param context Flyway context + */ @Override - public void beforeMigrate(Connection connection) { - - } - - @Override - public void afterMigrate(Connection connection) { + public void handle(Event event, org.flywaydb.core.api.callback.Context context) { initializeSiteObject(); } - - @Override - public void beforeEachMigrate(Connection connection, MigrationInfo migrationInfo) { - - } - - @Override - public void afterEachMigrate(Connection connection, MigrationInfo migrationInfo) { - - } - - @Override - public void beforeValidate(Connection connection) { - - } - - @Override - public void afterValidate(Connection connection) { - - } - - @Override - public void beforeBaseline(Connection connection) { - - } - - @Override - public void afterBaseline(Connection connection) { - - } - - @Override - public void beforeRepair(Connection connection) { - - } - - @Override - public void afterRepair(Connection connection) { - - } - - @Override - public void beforeInfo(Connection connection) { - - } - - @Override - public void afterInfo(Connection connection) { - - } } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java index ce481d0caf..624d0cb55a 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java @@ -7,12 +7,19 @@ */ package org.dspace.storage.rdbms.migration; +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.Reader; +import java.io.UncheckedIOException; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; +import java.util.Objects; import org.apache.commons.lang3.StringUtils; +import org.dspace.core.Constants; +import org.springframework.util.FileCopyUtils; /** * This Utility class offers utility methods which may be of use to perform @@ -270,4 +277,25 @@ public class MigrationUtils { return checksum; } + + /** + * Read a given Resource, converting to a String. This is used by several Java-based + * migrations to read a SQL migration into a string, so that it can be executed under + * specific scenarios. 
+ * @param resourcePath relative path of resource to read + * @return String contents of Resource + */ + public static String getResourceAsString(String resourcePath) { + // Read the resource, copying to a string + try (Reader reader = + new InputStreamReader( + Objects.requireNonNull(MigrationUtils.class.getClassLoader().getResourceAsStream(resourcePath)), + Constants.DEFAULT_ENCODING)) { + return FileCopyUtils.copyToString(reader); + } catch (IOException e) { + throw new UncheckedIOException(e); + } catch (NullPointerException e) { + throw new IllegalStateException("Resource at " + resourcePath + " was not found", e); + } + } } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java index c3a79783ad..56c5b474d9 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java @@ -8,11 +8,10 @@ package org.dspace.storage.rdbms.migration; import java.io.IOException; -import java.sql.Connection; import java.sql.SQLException; -import org.flywaydb.core.api.migration.MigrationChecksumProvider; -import org.flywaydb.core.api.migration.jdbc.JdbcMigration; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; /** * This class is in support of the "V1.4__Upgrade_to_DSpace_1.4_schema.sql" @@ -38,22 +37,22 @@ import org.flywaydb.core.api.migration.jdbc.JdbcMigration; * @author Tim Donohue */ public class V1_3_9__Drop_constraint_for_DSpace_1_4_schema - implements JdbcMigration, MigrationChecksumProvider { + extends BaseJavaMigration { /* The checksum to report for this migration (when successful) */ private int checksum = -1; /** * Actually migrate the existing database * - * @param connection SQL Connection object + * @param context Flyway Migration Context * @throws IOException A general class of exceptions produced by failed or interrupted I/O operations. * @throws SQLException An exception that provides information on a database access error or other errors. 
*/ @Override - public void migrate(Connection connection) + public void migrate(Context context) throws IOException, SQLException { // Drop the constraint associated with "name" column of "community" - checksum = MigrationUtils.dropDBConstraint(connection, "community", "name", "key"); + checksum = MigrationUtils.dropDBConstraint(context.getConnection(), "community", "name", "key"); } /** diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java index 77eb7a070d..6d82055e53 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java @@ -8,11 +8,10 @@ package org.dspace.storage.rdbms.migration; import java.io.IOException; -import java.sql.Connection; import java.sql.SQLException; -import org.flywaydb.core.api.migration.MigrationChecksumProvider; -import org.flywaydb.core.api.migration.jdbc.JdbcMigration; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; /** * This class is in support of the "V1.6__Upgrade_to_DSpace_1.6_schema.sql" @@ -38,26 +37,29 @@ import org.flywaydb.core.api.migration.jdbc.JdbcMigration; * @author Tim Donohue */ public class V1_5_9__Drop_constraint_for_DSpace_1_6_schema - implements JdbcMigration, MigrationChecksumProvider { + extends BaseJavaMigration { /* The checksum to report for this migration (when successful) */ private int checksum = -1; /** * Actually migrate the existing database * - * @param connection SQL Connection object + * @param context Flyway Migration Context * @throws IOException A general class of exceptions produced by failed or interrupted I/O operations. * @throws SQLException An exception that provides information on a database access error or other errors. 
*/ @Override - public void migrate(Connection connection) + public void migrate(Context context) throws IOException, SQLException { // Drop the constraint associated with "collection_id" column of "community2collection" table - int return1 = MigrationUtils.dropDBConstraint(connection, "community2collection", "collection_id", "pkey"); + int return1 = MigrationUtils.dropDBConstraint(context.getConnection(), "community2collection", + "collection_id", "pkey"); // Drop the constraint associated with "child_comm_id" column of "community2community" table - int return2 = MigrationUtils.dropDBConstraint(connection, "community2community", "child_comm_id", "pkey"); + int return2 = MigrationUtils.dropDBConstraint(context.getConnection(), "community2community", + "child_comm_id", "pkey"); // Drop the constraint associated with "item_id" column of "collection2item" table - int return3 = MigrationUtils.dropDBConstraint(connection, "collection2item", "item_id", "pkey"); + int return3 = MigrationUtils.dropDBConstraint(context.getConnection(), "collection2item", + "item_id", "pkey"); // Checksum will just be the sum of those three return values checksum = return1 + return2 + return3; diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java index 17598ade6c..ea72d99b6e 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java @@ -8,11 +8,10 @@ package org.dspace.storage.rdbms.migration; import java.io.IOException; -import java.sql.Connection; import java.sql.SQLException; -import org.flywaydb.core.api.migration.MigrationChecksumProvider; -import org.flywaydb.core.api.migration.jdbc.JdbcMigration; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; /** * This class is in support of the DS-1582 Metadata for All Objects feature. @@ -39,22 +38,22 @@ import org.flywaydb.core.api.migration.jdbc.JdbcMigration; * @author Tim Donohue */ public class V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint - implements JdbcMigration, MigrationChecksumProvider { + extends BaseJavaMigration { /* The checksum to report for this migration (when successful) */ private int checksum = -1; /** * Actually migrate the existing database * - * @param connection SQL Connection object + * @param context Flyway Migration Context * @throws IOException A general class of exceptions produced by failed or interrupted I/O operations. * @throws SQLException An exception that provides information on a database access error or other errors. 
*/ @Override - public void migrate(Connection connection) + public void migrate(Context context) throws IOException, SQLException { // Drop the constraint associated with "item_id" column of "metadatavalue" - checksum = MigrationUtils.dropDBConstraint(connection, "metadatavalue", "item_id", "fkey"); + checksum = MigrationUtils.dropDBConstraint(context.getConnection(), "metadatavalue", "item_id", "fkey"); } /** diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_7_2017_05_05__DS_3431_Add_Policies_for_BasicWorkflow.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_7_2017_05_05__DS_3431_Add_Policies_for_BasicWorkflow.java index 3b8e551b12..58fdc78d06 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_7_2017_05_05__DS_3431_Add_Policies_for_BasicWorkflow.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_7_2017_05_05__DS_3431_Add_Policies_for_BasicWorkflow.java @@ -7,25 +7,21 @@ */ package org.dspace.storage.rdbms.migration; -import java.sql.Connection; - -import org.dspace.core.Constants; import org.dspace.storage.rdbms.DatabaseUtils; -import org.flywaydb.core.api.migration.MigrationChecksumProvider; -import org.flywaydb.core.api.migration.jdbc.JdbcMigration; -import org.flywaydb.core.internal.util.scanner.classpath.ClassPathResource; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; public class V5_7_2017_05_05__DS_3431_Add_Policies_for_BasicWorkflow - implements JdbcMigration, MigrationChecksumProvider { + extends BaseJavaMigration { // Size of migration script run Integer migration_file_size = -1; @Override - public void migrate(Connection connection) throws Exception { + public void migrate(Context context) throws Exception { // Based on type of DB, get path to SQL migration script - String dbtype = DatabaseUtils.getDbType(connection); + String dbtype = DatabaseUtils.getDbType(context.getConnection()); String dataMigrateSQL; String sqlMigrationPath = "org/dspace/storage/rdbms/sqlmigration/workflow/" + dbtype + "/"; @@ -33,19 +29,18 @@ public class V5_7_2017_05_05__DS_3431_Add_Policies_for_BasicWorkflow // If XMLWorkflow Table does NOT exist in this database, then lets do the migration! 
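The Java migrations in these hunks switch from implementing JdbcMigration and MigrationChecksumProvider to extending BaseJavaMigration, which supplies the migration Context in place of a raw Connection and exposes getChecksum() as an overridable hook. A minimal sketch of a migration in the new style (class name, table and SQL are purely illustrative):

    import java.sql.SQLException;
    import java.sql.Statement;
    import org.flywaydb.core.api.migration.BaseJavaMigration;
    import org.flywaydb.core.api.migration.Context;

    // Hypothetical migration; Flyway derives version and description from the class name.
    public class V0_0_0__Example_java_migration extends BaseJavaMigration {

        private int checksum = -1;

        @Override
        public void migrate(Context context) throws SQLException {
            // The JDBC connection now comes from the migration Context
            try (Statement statement = context.getConnection().createStatement()) {
                statement.execute("CREATE TABLE IF NOT EXISTS example_table (id INTEGER)");
                checksum = 1;
            }
        }

        @Override
        public Integer getChecksum() {
            // Replaces the separate MigrationChecksumProvider interface
            return checksum;
        }
    }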
// If XMLWorkflow Table ALREADY exists, then this migration is a noop, we assume you manually ran the sql // scripts - if (DatabaseUtils.tableExists(connection, "cwf_workflowitem")) { + if (DatabaseUtils.tableExists(context.getConnection(), "cwf_workflowitem")) { return; } else { //Migrate the basic workflow // Get the contents of our data migration script, based on path & DB type - dataMigrateSQL = new ClassPathResource(sqlMigrationPath + "basicWorkflow" + "/V5.7_2017.05.05__DS-3431.sql", - getClass().getClassLoader()) - .loadAsString(Constants.DEFAULT_ENCODING); + dataMigrateSQL = MigrationUtils.getResourceAsString( + sqlMigrationPath + "basicWorkflow/V5.7_2017.05.05__DS-3431.sql"); } // Actually execute the Data migration SQL // This will migrate all existing traditional workflows to the new XMLWorkflow system & tables - DatabaseUtils.executeSql(connection, dataMigrateSQL); + DatabaseUtils.executeSql(context.getConnection(), dataMigrateSQL); migration_file_size = dataMigrateSQL.length(); } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V6_0_2015_03_06__DS_2701_Dso_Uuid_Migration.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V6_0_2015_03_06__DS_2701_Dso_Uuid_Migration.java index 98ac8752be..7aa0dc50a7 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V6_0_2015_03_06__DS_2701_Dso_Uuid_Migration.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V6_0_2015_03_06__DS_2701_Dso_Uuid_Migration.java @@ -7,30 +7,29 @@ */ package org.dspace.storage.rdbms.migration; -import java.sql.Connection; - -import org.flywaydb.core.api.migration.MigrationChecksumProvider; -import org.flywaydb.core.api.migration.jdbc.JdbcMigration; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; /** * Migration class that will drop the public key for the dspace objects, the integer based key will be moved to a UUID * * @author kevinvandevelde at atmire.com */ -public class V6_0_2015_03_06__DS_2701_Dso_Uuid_Migration implements JdbcMigration, MigrationChecksumProvider { +public class V6_0_2015_03_06__DS_2701_Dso_Uuid_Migration extends BaseJavaMigration { private int checksum = -1; @Override - public void migrate(Connection connection) throws Exception { - checksum += MigrationUtils.dropDBConstraint(connection, "eperson", "eperson_id", "pkey"); - checksum += MigrationUtils.dropDBConstraint(connection, "epersongroup", "eperson_group_id", "pkey"); - checksum += MigrationUtils.dropDBConstraint(connection, "community", "community_id", "pkey"); - checksum += MigrationUtils.dropDBConstraint(connection, "collection", "collection_id", "pkey"); - checksum += MigrationUtils.dropDBConstraint(connection, "item", "item_id", "pkey"); - checksum += MigrationUtils.dropDBConstraint(connection, "bundle", "bundle_id", "pkey"); - checksum += MigrationUtils.dropDBConstraint(connection, "bitstream", "bitstream_id", "pkey"); + public void migrate(Context context) throws Exception { + checksum += MigrationUtils.dropDBConstraint(context.getConnection(), "eperson", "eperson_id", "pkey"); + checksum += MigrationUtils.dropDBConstraint(context.getConnection(), "epersongroup", + "eperson_group_id", "pkey"); + checksum += MigrationUtils.dropDBConstraint(context.getConnection(), "community", "community_id", "pkey"); + checksum += MigrationUtils.dropDBConstraint(context.getConnection(), "collection", "collection_id", "pkey"); + checksum += MigrationUtils.dropDBConstraint(context.getConnection(), "item", 
"item_id", "pkey"); + checksum += MigrationUtils.dropDBConstraint(context.getConnection(), "bundle", "bundle_id", "pkey"); + checksum += MigrationUtils.dropDBConstraint(context.getConnection(), "bitstream", "bitstream_id", "pkey"); } @Override diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V6_0_2015_08_31__DS_2701_Hibernate_Workflow_Migration.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V6_0_2015_08_31__DS_2701_Hibernate_Workflow_Migration.java index 62f4b126ba..dd01aa8d2c 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V6_0_2015_08_31__DS_2701_Hibernate_Workflow_Migration.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V6_0_2015_08_31__DS_2701_Hibernate_Workflow_Migration.java @@ -7,29 +7,25 @@ */ package org.dspace.storage.rdbms.migration; -import java.sql.Connection; - -import org.dspace.core.Constants; import org.dspace.storage.rdbms.DatabaseUtils; -import org.flywaydb.core.api.migration.MigrationChecksumProvider; -import org.flywaydb.core.api.migration.jdbc.JdbcMigration; -import org.flywaydb.core.internal.util.scanner.classpath.ClassPathResource; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; /** * User: kevin (kevin at atmire.com) * Date: 1/09/15 * Time: 12:08 */ -public class V6_0_2015_08_31__DS_2701_Hibernate_Workflow_Migration implements JdbcMigration, MigrationChecksumProvider { +public class V6_0_2015_08_31__DS_2701_Hibernate_Workflow_Migration extends BaseJavaMigration { // Size of migration script run Integer migration_file_size = -1; @Override - public void migrate(Connection connection) throws Exception { + public void migrate(Context context) throws Exception { // Based on type of DB, get path to SQL migration script - String dbtype = DatabaseUtils.getDbType(connection); + String dbtype = DatabaseUtils.getDbType(context.getConnection()); String dataMigrateSQL; String sqlMigrationPath = "org/dspace/storage/rdbms/sqlmigration/workflow/" + dbtype + "/"; @@ -37,24 +33,20 @@ public class V6_0_2015_08_31__DS_2701_Hibernate_Workflow_Migration implements Jd // If XMLWorkflow Table does NOT exist in this database, then lets do the migration! 
// If XMLWorkflow Table ALREADY exists, then this migration is a noop, we assume you manually ran the sql // scripts - if (DatabaseUtils.tableExists(connection, "cwf_workflowitem")) { + if (DatabaseUtils.tableExists(context.getConnection(), "cwf_workflowitem")) { // Get the contents of our data migration script, based on path & DB type - dataMigrateSQL = new ClassPathResource(sqlMigrationPath + "xmlworkflow" + - "/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql", - getClass().getClassLoader()) - .loadAsString(Constants.DEFAULT_ENCODING); + dataMigrateSQL = MigrationUtils.getResourceAsString(sqlMigrationPath + "xmlworkflow" + + "/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql"); } else { //Migrate the basic workflow // Get the contents of our data migration script, based on path & DB type - dataMigrateSQL = new ClassPathResource(sqlMigrationPath + "basicWorkflow" + - "/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql", - getClass().getClassLoader()) - .loadAsString(Constants.DEFAULT_ENCODING); + dataMigrateSQL = MigrationUtils.getResourceAsString(sqlMigrationPath + "basicWorkflow" + + "/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql"); } // Actually execute the Data migration SQL // This will migrate all existing traditional workflows to the new XMLWorkflow system & tables - DatabaseUtils.executeSql(connection, dataMigrateSQL); + DatabaseUtils.executeSql(context.getConnection(), dataMigrateSQL); migration_file_size = dataMigrateSQL.length(); } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V6_0_2016_01_26__DS_2188_Remove_DBMS_Browse_Tables.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V6_0_2016_01_26__DS_2188_Remove_DBMS_Browse_Tables.java index 2b614b5356..daf2269e92 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V6_0_2016_01_26__DS_2188_Remove_DBMS_Browse_Tables.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V6_0_2016_01_26__DS_2188_Remove_DBMS_Browse_Tables.java @@ -14,8 +14,8 @@ import org.apache.logging.log4j.Logger; import org.dspace.browse.BrowseException; import org.dspace.browse.BrowseIndex; import org.dspace.storage.rdbms.DatabaseUtils; -import org.flywaydb.core.api.migration.MigrationChecksumProvider; -import org.flywaydb.core.api.migration.jdbc.JdbcMigration; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; /** * This Flyway Java migration deletes any legacy DBMS browse tables found in @@ -23,7 +23,7 @@ import org.flywaydb.core.api.migration.jdbc.JdbcMigration; * * @author Tim Donohue */ -public class V6_0_2016_01_26__DS_2188_Remove_DBMS_Browse_Tables implements JdbcMigration, MigrationChecksumProvider { +public class V6_0_2016_01_26__DS_2188_Remove_DBMS_Browse_Tables extends BaseJavaMigration { /** * log4j category */ @@ -34,8 +34,8 @@ public class V6_0_2016_01_26__DS_2188_Remove_DBMS_Browse_Tables implements JdbcM private int checksum = -1; @Override - public void migrate(Connection connection) throws Exception, SQLException { - removeDBMSBrowseTables(connection); + public void migrate(Context context) throws Exception, SQLException { + removeDBMSBrowseTables(context.getConnection()); } /** diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V6_1_2017_01_03__DS_3431_Add_Policies_for_BasicWorkflow.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V6_1_2017_01_03__DS_3431_Add_Policies_for_BasicWorkflow.java index 51e401b400..13636b311e 100644 --- 
a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V6_1_2017_01_03__DS_3431_Add_Policies_for_BasicWorkflow.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V6_1_2017_01_03__DS_3431_Add_Policies_for_BasicWorkflow.java @@ -7,25 +7,21 @@ */ package org.dspace.storage.rdbms.migration; -import java.sql.Connection; - -import org.dspace.core.Constants; import org.dspace.storage.rdbms.DatabaseUtils; -import org.flywaydb.core.api.migration.MigrationChecksumProvider; -import org.flywaydb.core.api.migration.jdbc.JdbcMigration; -import org.flywaydb.core.internal.util.scanner.classpath.ClassPathResource; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; public class V6_1_2017_01_03__DS_3431_Add_Policies_for_BasicWorkflow - implements JdbcMigration, MigrationChecksumProvider { + extends BaseJavaMigration { // Size of migration script run Integer migration_file_size = -1; @Override - public void migrate(Connection connection) throws Exception { + public void migrate(Context context) throws Exception { // Based on type of DB, get path to SQL migration script - String dbtype = DatabaseUtils.getDbType(connection); + String dbtype = DatabaseUtils.getDbType(context.getConnection()); String dataMigrateSQL; String sqlMigrationPath = "org/dspace/storage/rdbms/sqlmigration/workflow/" + dbtype + "/"; @@ -33,19 +29,18 @@ public class V6_1_2017_01_03__DS_3431_Add_Policies_for_BasicWorkflow // If XMLWorkflow Table does NOT exist in this database, then lets do the migration! // If XMLWorkflow Table ALREADY exists, then this migration is a noop, we assume you manually ran the sql // scripts - if (DatabaseUtils.tableExists(connection, "cwf_workflowitem")) { + if (DatabaseUtils.tableExists(context.getConnection(), "cwf_workflowitem")) { return; } else { //Migrate the basic workflow // Get the contents of our data migration script, based on path & DB type - dataMigrateSQL = new ClassPathResource(sqlMigrationPath + "basicWorkflow" + "/V6.1_2017.01.03__DS-3431.sql", - getClass().getClassLoader()) - .loadAsString(Constants.DEFAULT_ENCODING); + dataMigrateSQL = MigrationUtils.getResourceAsString(sqlMigrationPath + "basicWorkflow" + + "/V6.1_2017.01.03__DS-3431.sql"); } // Actually execute the Data migration SQL // This will migrate all existing traditional workflows to the new XMLWorkflow system & tables - DatabaseUtils.executeSql(connection, dataMigrateSQL); + DatabaseUtils.executeSql(context.getConnection(), dataMigrateSQL); migration_file_size = dataMigrateSQL.length(); } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V7_0_2018_04_03__Upgrade_Workflow_Policy.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V7_0_2018_04_03__Upgrade_Workflow_Policy.java index 100e345df3..3da7f8b40f 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V7_0_2018_04_03__Upgrade_Workflow_Policy.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V7_0_2018_04_03__Upgrade_Workflow_Policy.java @@ -7,15 +7,11 @@ */ package org.dspace.storage.rdbms.migration; -import java.sql.Connection; - -import org.dspace.core.Constants; import org.dspace.storage.rdbms.DatabaseUtils; import org.dspace.workflow.factory.WorkflowServiceFactory; import org.dspace.xmlworkflow.service.XmlWorkflowService; -import org.flywaydb.core.api.migration.MigrationChecksumProvider; -import org.flywaydb.core.api.migration.jdbc.JdbcMigration; -import 
org.flywaydb.core.internal.util.scanner.classpath.ClassPathResource; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; /** * This class automatically adding rptype to the resource policy created with a migration into XML-based Configurable @@ -23,30 +19,27 @@ import org.flywaydb.core.internal.util.scanner.classpath.ClassPathResource; * * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.it) */ -public class V7_0_2018_04_03__Upgrade_Workflow_Policy implements JdbcMigration, MigrationChecksumProvider { +public class V7_0_2018_04_03__Upgrade_Workflow_Policy extends BaseJavaMigration { // Size of migration script run protected Integer migration_file_size = -1; @Override - public void migrate(Connection connection) throws Exception { + public void migrate(Context context) throws Exception { // Make sure XML Workflow is enabled, shouldn't even be needed since this class is only loaded if the service // is enabled. if (WorkflowServiceFactory.getInstance().getWorkflowService() instanceof XmlWorkflowService) { // Now, check if the XMLWorkflow table (cwf_workflowitem) already exists in this database - if (DatabaseUtils.tableExists(connection, "cwf_workflowitem")) { - String dbtype = DatabaseUtils.getDbType(connection); + if (DatabaseUtils.tableExists(context.getConnection(), "cwf_workflowitem")) { + String dbtype = DatabaseUtils.getDbType(context.getConnection()); String sqlMigrationPath = "org/dspace/storage/rdbms/sqlmigration/workflow/" + dbtype + "/"; - String dataMigrateSQL = new ClassPathResource(sqlMigrationPath + - "xmlworkflow" + - "/V7.0_2018.04.03__upgrade_workflow_policy.sql", - getClass().getClassLoader()) - .loadAsString(Constants.DEFAULT_ENCODING); + String dataMigrateSQL = MigrationUtils.getResourceAsString( + sqlMigrationPath + "xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql"); // Actually execute the Data migration SQL // This will migrate all existing traditional workflows to the new XMLWorkflow system & tables - DatabaseUtils.executeSql(connection, dataMigrateSQL); + DatabaseUtils.executeSql(context.getConnection(), dataMigrateSQL); // Assuming both succeeded, save the size of the scripts for getChecksum() below migration_file_size = dataMigrateSQL.length(); diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java index 8f6a305bb9..dc8c7c22df 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java @@ -8,16 +8,14 @@ package org.dspace.storage.rdbms.xmlworkflow; import java.io.IOException; -import java.sql.Connection; import java.sql.SQLException; -import org.dspace.core.Constants; import org.dspace.storage.rdbms.DatabaseUtils; +import org.dspace.storage.rdbms.migration.MigrationUtils; import org.dspace.workflow.factory.WorkflowServiceFactory; import org.dspace.xmlworkflow.service.XmlWorkflowService; -import org.flywaydb.core.api.migration.MigrationChecksumProvider; -import org.flywaydb.core.api.migration.jdbc.JdbcMigration; -import org.flywaydb.core.internal.util.scanner.classpath.ClassPathResource; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; import org.slf4j.Logger; 
import org.slf4j.LoggerFactory; @@ -38,7 +36,7 @@ import org.slf4j.LoggerFactory; * @author Tim Donohue */ public class V5_0_2014_11_04__Enable_XMLWorkflow_Migration - implements JdbcMigration, MigrationChecksumProvider { + extends BaseJavaMigration { /** * logging category */ @@ -50,12 +48,12 @@ public class V5_0_2014_11_04__Enable_XMLWorkflow_Migration /** * Actually migrate the existing database * - * @param connection SQL Connection object + * @param context Flyway Migration Context * @throws IOException A general class of exceptions produced by failed or interrupted I/O operations. * @throws SQLException An exception that provides information on a database access error or other errors. */ @Override - public void migrate(Connection connection) + public void migrate(Context context) throws IOException, SQLException { // Make sure XML Workflow is enabled, shouldn't even be needed since this class is only loaded if the service // is enabled. @@ -64,13 +62,13 @@ public class V5_0_2014_11_04__Enable_XMLWorkflow_Migration // migration, as it is incompatible // with a 6.x database. In that scenario the corresponding 6.x XML Workflow migration will create // necessary tables. - && DatabaseUtils.getCurrentFlywayDSpaceState(connection) < 6) { + && DatabaseUtils.getCurrentFlywayDSpaceState(context.getConnection()) < 6) { // Now, check if the XMLWorkflow table (cwf_workflowitem) already exists in this database // If XMLWorkflow Table does NOT exist in this database, then lets do the migration! // If XMLWorkflow Table ALREADY exists, then this migration is a noop, we assume you manually ran the sql // scripts - if (!DatabaseUtils.tableExists(connection, "cwf_workflowitem")) { - String dbtype = connection.getMetaData().getDatabaseProductName(); + if (!DatabaseUtils.tableExists(context.getConnection(), "cwf_workflowitem")) { + String dbtype = context.getConnection().getMetaData().getDatabaseProductName(); String dbFileLocation = null; if (dbtype.toLowerCase().contains("postgres")) { dbFileLocation = "postgres"; @@ -88,27 +86,21 @@ public class V5_0_2014_11_04__Enable_XMLWorkflow_Migration // Get the contents of our DB Schema migration script, based on path & DB type // (e.g. /src/main/resources/[path-to-this-class]/postgres/xml_workflow_migration.sql) - String dbMigrateSQL = new ClassPathResource(packagePath + "/" + - dbFileLocation + - "/xml_workflow_migration.sql", - getClass().getClassLoader()) - .loadAsString(Constants.DEFAULT_ENCODING); + String dbMigrateSQL = MigrationUtils.getResourceAsString(packagePath + "/" + dbFileLocation + + "/xml_workflow_migration.sql"); // Actually execute the Database schema migration SQL // This will create the necessary tables for the XMLWorkflow feature - DatabaseUtils.executeSql(connection, dbMigrateSQL); + DatabaseUtils.executeSql(context.getConnection(), dbMigrateSQL); // Get the contents of our data migration script, based on path & DB type // (e.g. 
/src/main/resources/[path-to-this-class]/postgres/data_workflow_migration.sql) - String dataMigrateSQL = new ClassPathResource(packagePath + "/" + - dbFileLocation + - "/data_workflow_migration.sql", - getClass().getClassLoader()) - .loadAsString(Constants.DEFAULT_ENCODING); + String dataMigrateSQL = MigrationUtils.getResourceAsString(packagePath + "/" + dbFileLocation + + "/data_workflow_migration.sql"); // Actually execute the Data migration SQL // This will migrate all existing traditional workflows to the new XMLWorkflow system & tables - DatabaseUtils.executeSql(connection, dataMigrateSQL); + DatabaseUtils.executeSql(context.getConnection(), dataMigrateSQL); // Assuming both succeeded, save the size of the scripts for getChecksum() below migration_file_size = dbMigrateSQL.length() + dataMigrateSQL.length(); diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java index ab090f5bf1..b70b19f3a5 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java @@ -7,15 +7,13 @@ */ package org.dspace.storage.rdbms.xmlworkflow; -import java.sql.Connection; -import org.dspace.core.Constants; import org.dspace.storage.rdbms.DatabaseUtils; +import org.dspace.storage.rdbms.migration.MigrationUtils; import org.dspace.workflow.factory.WorkflowServiceFactory; import org.dspace.xmlworkflow.service.XmlWorkflowService; -import org.flywaydb.core.api.migration.MigrationChecksumProvider; -import org.flywaydb.core.api.migration.jdbc.JdbcMigration; -import org.flywaydb.core.internal.util.scanner.classpath.ClassPathResource; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; /** * This class automatically migrates your DSpace Database to use the @@ -34,13 +32,13 @@ import org.flywaydb.core.internal.util.scanner.classpath.ClassPathResource; * Date: 1/09/15 * Time: 11:34 */ -public class V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration implements JdbcMigration, MigrationChecksumProvider { +public class V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration extends BaseJavaMigration { // Size of migration script run protected Integer migration_file_size = -1; @Override - public void migrate(Connection connection) throws Exception { + public void migrate(Context context) throws Exception { // Make sure XML Workflow is enabled, shouldn't even be needed since this class is only loaded if the service // is enabled. if (WorkflowServiceFactory.getInstance().getWorkflowService() instanceof XmlWorkflowService) { @@ -48,8 +46,8 @@ public class V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration implements Jd // If XMLWorkflow Table does NOT exist in this database, then lets do the migration! 
// If XMLWorkflow Table ALREADY exists, then this migration is a noop, we assume you manually ran the sql // scripts - if (!DatabaseUtils.tableExists(connection, "cwf_workflowitem")) { - String dbtype = connection.getMetaData().getDatabaseProductName(); + if (!DatabaseUtils.tableExists(context.getConnection(), "cwf_workflowitem")) { + String dbtype = context.getConnection().getMetaData().getDatabaseProductName(); String dbFileLocation = null; if (dbtype.toLowerCase().contains("postgres")) { dbFileLocation = "postgres"; @@ -67,27 +65,21 @@ public class V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration implements Jd // Get the contents of our DB Schema migration script, based on path & DB type // (e.g. /src/main/resources/[path-to-this-class]/postgres/xml_workflow_migration.sql) - String dbMigrateSQL = new ClassPathResource(packagePath + "/" + - dbFileLocation + - "/v6.0__DS-2701_xml_workflow_migration.sql", - getClass().getClassLoader()) - .loadAsString(Constants.DEFAULT_ENCODING); + String dbMigrateSQL = MigrationUtils.getResourceAsString(packagePath + "/" + dbFileLocation + + "/v6.0__DS-2701_xml_workflow_migration.sql"); // Actually execute the Database schema migration SQL // This will create the necessary tables for the XMLWorkflow feature - DatabaseUtils.executeSql(connection, dbMigrateSQL); + DatabaseUtils.executeSql(context.getConnection(), dbMigrateSQL); // Get the contents of our data migration script, based on path & DB type // (e.g. /src/main/resources/[path-to-this-class]/postgres/data_workflow_migration.sql) - String dataMigrateSQL = new ClassPathResource(packagePath + "/" + - dbFileLocation + - "/v6.0__DS-2701_data_workflow_migration.sql", - getClass().getClassLoader()) - .loadAsString(Constants.DEFAULT_ENCODING); + String dataMigrateSQL = MigrationUtils.getResourceAsString(packagePath + "/" + dbFileLocation + + "/v6.0__DS-2701_data_workflow_migration.sql"); // Actually execute the Data migration SQL // This will migrate all existing traditional workflows to the new XMLWorkflow system & tables - DatabaseUtils.executeSql(connection, dataMigrateSQL); + DatabaseUtils.executeSql(context.getConnection(), dataMigrateSQL); // Assuming both succeeded, save the size of the scripts for getChecksum() below migration_file_size = dbMigrateSQL.length() + dataMigrateSQL.length(); diff --git a/dspace-api/src/main/java/org/dspace/submit/lookup/ArXivFileDataLoader.java b/dspace-api/src/main/java/org/dspace/submit/lookup/ArXivFileDataLoader.java deleted file mode 100644 index ebc898e4cf..0000000000 --- a/dspace-api/src/main/java/org/dspace/submit/lookup/ArXivFileDataLoader.java +++ /dev/null @@ -1,146 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ - -package org.dspace.submit.lookup; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStream; -import java.util.List; -import java.util.Map; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; - -import gr.ekt.bte.core.DataLoadingSpec; -import gr.ekt.bte.core.Record; -import gr.ekt.bte.core.RecordSet; -import gr.ekt.bte.core.Value; -import gr.ekt.bte.dataloader.FileDataLoader; -import gr.ekt.bte.exceptions.MalformedSourceException; -import org.apache.commons.lang3.StringUtils; 
-import org.apache.logging.log4j.Logger; -import org.dspace.app.util.XMLUtils; -import org.w3c.dom.Document; -import org.w3c.dom.Element; -import org.xml.sax.SAXException; - -/** - * @author Andrea Bollini - * @author Kostas Stamatis - * @author Luigi Andrea Pascarelli - * @author Panagiotis Koutsourakis - */ -public class ArXivFileDataLoader extends FileDataLoader { - - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(ArXivFileDataLoader.class); - - Map fieldMap; // mapping between service fields and local - // intermediate fields - - /** - * Empty constructor - */ - public ArXivFileDataLoader() { - } - - /** - * @param filename Name of file to load ArXiv data from. - */ - public ArXivFileDataLoader(String filename) { - super(filename); - } - - /* - * {@see gr.ekt.bte.core.DataLoader#getRecords()} - * - * @throws MalformedSourceException - */ - @Override - public RecordSet getRecords() throws MalformedSourceException { - - RecordSet recordSet = new RecordSet(); - - try { - InputStream inputStream = new FileInputStream(new File(filename)); - - DocumentBuilderFactory factory = DocumentBuilderFactory - .newInstance(); - factory.setValidating(false); - factory.setIgnoringComments(true); - factory.setIgnoringElementContentWhitespace(true); - - DocumentBuilder db = factory.newDocumentBuilder(); - Document inDoc = db.parse(inputStream); - - Element xmlRoot = inDoc.getDocumentElement(); - List dataRoots = XMLUtils.getElementList(xmlRoot, "entry"); - - for (Element dataRoot : dataRoots) { - Record record = ArxivUtils.convertArxixDomToRecord(dataRoot); - if (record != null) { - recordSet.addRecord(convertFields(record)); - } - } - } catch (FileNotFoundException e) { - log.error(e.getMessage(), e); - } catch (ParserConfigurationException e) { - log.error(e.getMessage(), e); - } catch (SAXException e) { - log.error(e.getMessage(), e); - } catch (IOException e) { - log.error(e.getMessage(), e); - } - - return recordSet; - } - - /* - * (non-Javadoc) - * - * @see - * gr.ekt.bte.core.DataLoader#getRecords(gr.ekt.bte.core.DataLoadingSpec) - */ - @Override - public RecordSet getRecords(DataLoadingSpec spec) - throws MalformedSourceException { - if (spec.getOffset() > 0) { - return new RecordSet(); - } - return getRecords(); - } - - public Record convertFields(Record publication) { - for (String fieldName : fieldMap.keySet()) { - String md = null; - if (fieldMap != null) { - md = this.fieldMap.get(fieldName); - } - - if (StringUtils.isBlank(md)) { - continue; - } else { - md = md.trim(); - } - - if (publication.isMutable()) { - List values = publication.getValues(fieldName); - publication.makeMutable().removeField(fieldName); - publication.makeMutable().addField(md, values); - } - } - - return publication; - } - - public void setFieldMap(Map fieldMap) { - this.fieldMap = fieldMap; - } -} diff --git a/dspace-api/src/main/java/org/dspace/submit/lookup/ArXivOnlineDataLoader.java b/dspace-api/src/main/java/org/dspace/submit/lookup/ArXivOnlineDataLoader.java deleted file mode 100644 index e477412621..0000000000 --- a/dspace-api/src/main/java/org/dspace/submit/lookup/ArXivOnlineDataLoader.java +++ /dev/null @@ -1,84 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.submit.lookup; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; 
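[Editor's example] The BTE file loaders removed in this commit (ArXivFileDataLoader above, PubmedFileDataLoader further down) share the same convertFields() step: values recorded under a remote-service field name are moved onto the locally configured intermediate field. A compact restatement of that logic, with the Spring-injected fieldMap passed in explicitly and the helper class name invented for illustration:

import java.util.List;
import java.util.Map;
import gr.ekt.bte.core.Record;
import gr.ekt.bte.core.Value;
import org.apache.commons.lang3.StringUtils;

final class FieldRemapSketch {
    private FieldRemapSketch() { }

    // fieldMap: remote-service field name -> local intermediate field name
    static Record convertFields(Record publication, Map<String, String> fieldMap) {
        for (String fieldName : fieldMap.keySet()) {
            String md = fieldMap.get(fieldName);
            if (StringUtils.isBlank(md)) {
                continue; // no local mapping configured for this field
            }
            md = md.trim();
            if (publication.isMutable()) {
                // Move the values from the service field onto the mapped field name
                List<Value> values = publication.getValues(fieldName);
                publication.makeMutable().removeField(fieldName);
                publication.makeMutable().addField(md, values);
            }
        }
        return publication;
    }
}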
-import java.util.Map; -import java.util.Set; - -import gr.ekt.bte.core.Record; -import org.apache.http.HttpException; -import org.dspace.core.Context; - -/** - * @author Andrea Bollini - * @author Kostas Stamatis - * @author Luigi Andrea Pascarelli - * @author Panagiotis Koutsourakis - */ -public class ArXivOnlineDataLoader extends NetworkSubmissionLookupDataLoader { - protected ArXivService arXivService = new ArXivService(); - - protected boolean searchProvider = true; - - public void setArXivService(ArXivService arXivService) { - this.arXivService = arXivService; - } - - @Override - public List getSupportedIdentifiers() { - return Arrays.asList(new String[] {ARXIV, DOI}); - } - - public void setSearchProvider(boolean searchProvider) { - this.searchProvider = searchProvider; - } - - @Override - public boolean isSearchProvider() { - return searchProvider; - } - - @Override - public List getByIdentifier(Context context, - Map> keys) throws HttpException, IOException { - List results = new ArrayList(); - if (keys != null) { - Set dois = keys.get(DOI); - Set arxivids = keys.get(ARXIV); - List items = new ArrayList(); - if (dois != null && dois.size() > 0) { - items.addAll(arXivService.getByDOIs(dois)); - } - if (arxivids != null && arxivids.size() > 0) { - for (String arxivid : arxivids) { - items.add(arXivService.getByArXivIDs(arxivid)); - } - } - - for (Record item : items) { - results.add(convertFields(item)); - } - } - return results; - } - - @Override - public List search(Context context, String title, String author, - int year) throws HttpException, IOException { - List results = new ArrayList(); - List items = arXivService.searchByTerm(title, author, year); - for (Record item : items) { - results.add(convertFields(item)); - } - return results; - } -} diff --git a/dspace-api/src/main/java/org/dspace/submit/lookup/ArXivService.java b/dspace-api/src/main/java/org/dspace/submit/lookup/ArXivService.java deleted file mode 100644 index 0a32871758..0000000000 --- a/dspace-api/src/main/java/org/dspace/submit/lookup/ArXivService.java +++ /dev/null @@ -1,159 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.submit.lookup; - -import java.io.IOException; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.List; -import java.util.Set; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; - -import gr.ekt.bte.core.Record; -import org.apache.commons.lang3.StringUtils; -import org.apache.http.HttpException; -import org.apache.http.HttpResponse; -import org.apache.http.HttpStatus; -import org.apache.http.StatusLine; -import org.apache.http.client.HttpClient; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.utils.URIBuilder; -import org.apache.http.impl.client.DefaultHttpClient; -import org.apache.http.params.CoreConnectionPNames; -import org.apache.http.params.HttpParams; -import org.dspace.app.util.XMLUtils; -import org.w3c.dom.Document; -import org.w3c.dom.Element; - -/** - * @author Andrea Bollini - * @author Kostas Stamatis - * @author Luigi Andrea Pascarelli - * @author Panagiotis Koutsourakis - */ -public class ArXivService { - private int timeout = 1000; - - /** - * How long to wait for a connection to be established. 
- * - * @param timeout milliseconds - */ - public void setTimeout(int timeout) { - this.timeout = timeout; - } - - public List getByDOIs(Set dois) throws HttpException, - IOException { - if (dois != null && dois.size() > 0) { - String doisQuery = StringUtils.join(dois.iterator(), " OR "); - return search(doisQuery, null, 100); - } - return null; - } - - public List searchByTerm(String title, String author, int year) - throws HttpException, IOException { - StringBuffer query = new StringBuffer(); - if (StringUtils.isNotBlank(title)) { - query.append("ti:\"").append(title).append("\""); - } - if (StringUtils.isNotBlank(author)) { - // [FAU] - if (query.length() > 0) { - query.append(" AND "); - } - query.append("au:\"").append(author).append("\""); - } - return search(query.toString(), "", 10); - } - - protected List search(String query, String arxivid, int max_result) - throws IOException, HttpException { - List results = new ArrayList(); - HttpGet method = null; - try { - HttpClient client = new DefaultHttpClient(); - HttpParams params = client.getParams(); - params.setIntParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, timeout); - - try { - URIBuilder uriBuilder = new URIBuilder("http://export.arxiv.org/api/query"); - uriBuilder.addParameter("id_list", arxivid); - uriBuilder.addParameter("search_query", query); - uriBuilder.addParameter("max_results", String.valueOf(max_result)); - method = new HttpGet(uriBuilder.build()); - } catch (URISyntaxException ex) { - throw new HttpException(ex.getMessage()); - } - - // Execute the method. - HttpResponse response = client.execute(method); - StatusLine responseStatus = response.getStatusLine(); - int statusCode = responseStatus.getStatusCode(); - - if (statusCode != HttpStatus.SC_OK) { - if (statusCode == HttpStatus.SC_BAD_REQUEST) { - throw new RuntimeException("arXiv query is not valid"); - } else { - throw new RuntimeException("Http call failed: " - + responseStatus); - } - } - - try { - DocumentBuilderFactory factory = DocumentBuilderFactory - .newInstance(); - factory.setValidating(false); - factory.setIgnoringComments(true); - factory.setIgnoringElementContentWhitespace(true); - - DocumentBuilder db = factory.newDocumentBuilder(); - Document inDoc = db.parse(response.getEntity().getContent()); - - Element xmlRoot = inDoc.getDocumentElement(); - List dataRoots = XMLUtils.getElementList(xmlRoot, - "entry"); - - for (Element dataRoot : dataRoots) { - Record crossitem = ArxivUtils - .convertArxixDomToRecord(dataRoot); - if (crossitem != null) { - results.add(crossitem); - } - } - } catch (Exception e) { - throw new RuntimeException( - "ArXiv identifier is not valid or not exist"); - } - } finally { - if (method != null) { - method.releaseConnection(); - } - } - - return results; - } - - public Record getByArXivIDs(String raw) throws HttpException, IOException { - if (StringUtils.isNotBlank(raw)) { - raw = raw.trim(); - if (raw.startsWith("http://arxiv.org/abs/")) { - raw = raw.substring("http://arxiv.org/abs/".length()); - } else if (raw.toLowerCase().startsWith("arxiv:")) { - raw = raw.substring("arxiv:".length()); - } - List result = search("", raw, 1); - if (result != null && result.size() > 0) { - return result.get(0); - } - } - return null; - } -} diff --git a/dspace-api/src/main/java/org/dspace/submit/lookup/ArxivUtils.java b/dspace-api/src/main/java/org/dspace/submit/lookup/ArxivUtils.java deleted file mode 100644 index 4caa0a957b..0000000000 --- a/dspace-api/src/main/java/org/dspace/submit/lookup/ArxivUtils.java +++ /dev/null @@ -1,151 
+0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -/** - * - */ -package org.dspace.submit.lookup; - -import java.util.LinkedList; -import java.util.List; - -import gr.ekt.bte.core.MutableRecord; -import gr.ekt.bte.core.Record; -import gr.ekt.bte.core.StringValue; -import gr.ekt.bte.core.Value; -import org.dspace.app.util.XMLUtils; -import org.dspace.submit.util.SubmissionLookupPublication; -import org.w3c.dom.Element; - -/** - * @author Andrea Bollini - * @author Kostas Stamatis - * @author Luigi Andrea Pascarelli - * @author Panagiotis Koutsourakis - */ -public class ArxivUtils { - - /** - * Default constructor - */ - private ArxivUtils() { } - - public static Record convertArxixDomToRecord(Element dataRoot) { - MutableRecord record = new SubmissionLookupPublication(""); - - String articleTitle = XMLUtils.getElementValue(dataRoot, "title"); - if (articleTitle != null) { - record.addValue("title", new StringValue(articleTitle)); - } - String summary = XMLUtils.getElementValue(dataRoot, "summary"); - if (summary != null) { - record.addValue("summary", new StringValue(summary)); - } - String year = XMLUtils.getElementValue(dataRoot, "published"); - if (year != null) { - record.addValue("published", new StringValue(year)); - } - String splashPageUrl = XMLUtils.getElementValue(dataRoot, "id"); - if (splashPageUrl != null) { - record.addValue("id", new StringValue(splashPageUrl)); - } - String comment = XMLUtils.getElementValue(dataRoot, "arxiv:comment"); - if (comment != null) { - record.addValue("comment", new StringValue(comment)); - } - - List links = XMLUtils.getElementList(dataRoot, "link"); - if (links != null) { - for (Element link : links) { - if ("related".equals(link.getAttribute("rel")) - && "pdf".equals(link.getAttribute("title"))) { - String pdfUrl = link.getAttribute("href"); - if (pdfUrl != null) { - record.addValue("pdfUrl", new StringValue(pdfUrl)); - } - } - } - } - - String doi = XMLUtils.getElementValue(dataRoot, "arxiv:doi"); - if (doi != null) { - record.addValue("doi", new StringValue(doi)); - } - String journalRef = XMLUtils.getElementValue(dataRoot, - "arxiv:journal_ref"); - if (journalRef != null) { - record.addValue("journalRef", new StringValue(journalRef)); - } - - List primaryCategory = new LinkedList(); - List primaryCategoryList = XMLUtils.getElementList(dataRoot, - "arxiv:primary_category"); - if (primaryCategoryList != null) { - for (Element primaryCategoryElement : primaryCategoryList) { - primaryCategory - .add(primaryCategoryElement.getAttribute("term")); - } - } - - if (primaryCategory.size() > 0) { - List values = new LinkedList(); - for (String s : primaryCategory) { - values.add(new StringValue(s)); - } - record.addField("primaryCategory", values); - } - - List category = new LinkedList(); - List categoryList = XMLUtils.getElementList(dataRoot, - "category"); - if (categoryList != null) { - for (Element categoryElement : categoryList) { - category.add(categoryElement.getAttribute("term")); - } - } - - if (category.size() > 0) { - List values = new LinkedList(); - for (String s : category) { - values.add(new StringValue(s)); - } - record.addField("category", values); - } - - List authors = new LinkedList(); - List authorsWithAffiliations = new LinkedList(); - List authorList = XMLUtils.getElementList(dataRoot, "author"); - if (authorList != null) { - for (Element 
authorElement : authorList) { - String authorName = XMLUtils.getElementValue(authorElement, "name"); - String authorAffiliation = XMLUtils.getElementValue(authorElement, "arxiv:affiliation"); - - authors.add(authorName); - authorsWithAffiliations.add(authorName + ": " + authorAffiliation); - } - } - - if (authors.size() > 0) { - List values = new LinkedList(); - for (String sArray : authors) { - values.add(new StringValue(sArray)); - } - record.addField("author", values); - } - - if (authorsWithAffiliations.size() > 0) { - List values = new LinkedList(); - for (String sArray : authorsWithAffiliations) { - values.add(new StringValue(sArray)); - } - record.addField("authorWithAffiliation", values); - } - - return record; - } - -} diff --git a/dspace-api/src/main/java/org/dspace/submit/lookup/CiNiiService.java b/dspace-api/src/main/java/org/dspace/submit/lookup/CiNiiService.java index 23026353fd..bb59043e52 100644 --- a/dspace-api/src/main/java/org/dspace/submit/lookup/CiNiiService.java +++ b/dspace-api/src/main/java/org/dspace/submit/lookup/CiNiiService.java @@ -102,6 +102,9 @@ public class CiNiiService { factory.setValidating(false); factory.setIgnoringComments(true); factory.setIgnoringElementContentWhitespace(true); + // disallow DTD parsing to ensure no XXE attacks can occur. + // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html + factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); DocumentBuilder db = factory.newDocumentBuilder(); Document inDoc = db.parse(response.getEntity().getContent()); @@ -178,6 +181,9 @@ public class CiNiiService { factory.setValidating(false); factory.setIgnoringComments(true); factory.setIgnoringElementContentWhitespace(true); + // disallow DTD parsing to ensure no XXE attacks can occur. + // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html + factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); DocumentBuilder db = factory.newDocumentBuilder(); Document inDoc = db.parse(response.getEntity().getContent()); diff --git a/dspace-api/src/main/java/org/dspace/submit/lookup/CrossRefService.java b/dspace-api/src/main/java/org/dspace/submit/lookup/CrossRefService.java index f73e9c0352..4b99cf1f8b 100644 --- a/dspace-api/src/main/java/org/dspace/submit/lookup/CrossRefService.java +++ b/dspace-api/src/main/java/org/dspace/submit/lookup/CrossRefService.java @@ -99,6 +99,9 @@ public class CrossRefService { factory.setValidating(false); factory.setIgnoringComments(true); factory.setIgnoringElementContentWhitespace(true); + // disallow DTD parsing to ensure no XXE attacks can occur. 
+ // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html + factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); DocumentBuilder db = factory .newDocumentBuilder(); diff --git a/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedFileDataLoader.java b/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedFileDataLoader.java deleted file mode 100644 index 05a37e64d6..0000000000 --- a/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedFileDataLoader.java +++ /dev/null @@ -1,148 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ - -package org.dspace.submit.lookup; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStream; -import java.util.List; -import java.util.Map; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; - -import gr.ekt.bte.core.DataLoadingSpec; -import gr.ekt.bte.core.Record; -import gr.ekt.bte.core.RecordSet; -import gr.ekt.bte.core.Value; -import gr.ekt.bte.dataloader.FileDataLoader; -import gr.ekt.bte.exceptions.MalformedSourceException; -import org.apache.commons.lang3.StringUtils; -import org.dspace.app.util.XMLUtils; -import org.w3c.dom.Document; -import org.w3c.dom.Element; -import org.xml.sax.SAXException; - -/** - * @author Andrea Bollini - * @author Kostas Stamatis - * @author Luigi Andrea Pascarelli - * @author Panagiotis Koutsourakis - */ -public class PubmedFileDataLoader extends FileDataLoader { - - Map fieldMap; // mapping between service fields and local - // intermediate fields - - /** - * - */ - public PubmedFileDataLoader() { - } - - /** - * @param filename Name of file to load CiNii data from. 
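[Editor's example] The CiNiiService and CrossRefService hunks above apply the same XXE hardening to each DocumentBuilderFactory they create. A stand-alone sketch of a factory configured that way; the helper class and method names are illustrative only:

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

final class SafeDocumentBuilder {
    private SafeDocumentBuilder() { }

    static DocumentBuilder create() throws ParserConfigurationException {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setValidating(false);
        factory.setIgnoringComments(true);
        factory.setIgnoringElementContentWhitespace(true);
        // Reject any DOCTYPE declaration so external entities can never be resolved (XXE).
        // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
        factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        return factory.newDocumentBuilder();
    }
}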
- */ - public PubmedFileDataLoader(String filename) { - super(filename); - } - - /* - * {@see gr.ekt.bte.core.DataLoader#getRecords()} - * - * @throws MalformedSourceException - */ - @Override - public RecordSet getRecords() throws MalformedSourceException { - - RecordSet recordSet = new RecordSet(); - - try { - InputStream inputStream = new FileInputStream(new File(filename)); - - DocumentBuilderFactory factory = DocumentBuilderFactory - .newInstance(); - factory.setValidating(false); - factory.setIgnoringComments(true); - factory.setIgnoringElementContentWhitespace(true); - - DocumentBuilder builder = factory.newDocumentBuilder(); - Document inDoc = builder.parse(inputStream); - - Element xmlRoot = inDoc.getDocumentElement(); - List pubArticles = XMLUtils.getElementList(xmlRoot, - "PubmedArticle"); - - for (Element xmlArticle : pubArticles) { - Record record = null; - try { - record = PubmedUtils.convertPubmedDomToRecord(xmlArticle); - recordSet.addRecord(convertFields(record)); - } catch (Exception e) { - throw new RuntimeException(e.getMessage(), e); - } - } - } catch (FileNotFoundException e) { - e.printStackTrace(); - } catch (ParserConfigurationException e) { - e.printStackTrace(); - } catch (SAXException e) { - e.printStackTrace(); - } catch (IOException e) { - e.printStackTrace(); - } - - return recordSet; - - } - - /* - * (non-Javadoc) - * - * @see - * gr.ekt.bte.core.DataLoader#getRecords(gr.ekt.bte.core.DataLoadingSpec) - */ - @Override - public RecordSet getRecords(DataLoadingSpec spec) - throws MalformedSourceException { - if (spec.getOffset() > 0) { - return new RecordSet(); - } - return getRecords(); - } - - public Record convertFields(Record publication) { - for (String fieldName : fieldMap.keySet()) { - String md = null; - if (fieldMap != null) { - md = this.fieldMap.get(fieldName); - } - - if (StringUtils.isBlank(md)) { - continue; - } else { - md = md.trim(); - } - - if (publication.isMutable()) { - List values = publication.getValues(fieldName); - publication.makeMutable().removeField(fieldName); - publication.makeMutable().addField(md, values); - } - } - - return publication; - } - - public void setFieldMap(Map fieldMap) { - this.fieldMap = fieldMap; - } -} diff --git a/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedOnlineDataLoader.java b/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedOnlineDataLoader.java deleted file mode 100644 index 094ce4e21d..0000000000 --- a/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedOnlineDataLoader.java +++ /dev/null @@ -1,116 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.submit.lookup; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import gr.ekt.bte.core.Record; -import org.apache.http.HttpException; -import org.apache.logging.log4j.Logger; -import org.dspace.core.Context; -import org.dspace.core.LogManager; - -/** - * @author Andrea Bollini - * @author Kostas Stamatis - * @author Luigi Andrea Pascarelli - * @author Panagiotis Koutsourakis - */ -public class PubmedOnlineDataLoader extends NetworkSubmissionLookupDataLoader { - protected boolean searchProvider = true; - - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(PubmedOnlineDataLoader.class); - - protected 
PubmedService pubmedService = new PubmedService(); - - public void setPubmedService(PubmedService pubmedService) { - this.pubmedService = pubmedService; - } - - @Override - public List getSupportedIdentifiers() { - return Arrays.asList(new String[] {PUBMED, DOI}); - } - - public void setSearchProvider(boolean searchProvider) { - this.searchProvider = searchProvider; - } - - @Override - public boolean isSearchProvider() { - return searchProvider; - } - - @Override - public List getByIdentifier(Context context, - Map> keys) throws HttpException, IOException { - Set pmids = keys != null ? keys.get(PUBMED) : null; - Set dois = keys != null ? keys.get(DOI) : null; - List results = new ArrayList(); - if (pmids != null && pmids.size() > 0 - && (dois == null || dois.size() == 0)) { - for (String pmid : pmids) { - Record p = null; - try { - p = pubmedService.getByPubmedID(pmid); - } catch (Exception e) { - log.error(LogManager.getHeader(context, "getByIdentifier", - "pmid=" + pmid), e); - } - if (p != null) { - results.add(convertFields(p)); - } - } - } else if (dois != null && dois.size() > 0 - && (pmids == null || pmids.size() == 0)) { - StringBuffer query = new StringBuffer(); - for (String d : dois) { - if (query.length() > 0) { - query.append(" OR "); - } - query.append(d).append("[AI]"); - } - - List pubmedResults = pubmedService.search(query.toString()); - for (Record p : pubmedResults) { - results.add(convertFields(p)); - } - } else if (dois != null && dois.size() > 0 && pmids != null - && pmids.size() > 0) { - // EKT:ToDo: support list of dois and pmids in the search method of - // pubmedService - List pubmedResults = pubmedService.search(dois.iterator() - .next(), pmids.iterator().next()); - if (pubmedResults != null) { - for (Record p : pubmedResults) { - results.add(convertFields(p)); - } - } - } - - return results; - } - - @Override - public List search(Context context, String title, String author, - int year) throws HttpException, IOException { - List pubmedResults = pubmedService.search(title, author, year); - List results = new ArrayList(); - if (pubmedResults != null) { - for (Record p : pubmedResults) { - results.add(convertFields(p)); - } - } - return results; - } -} diff --git a/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedService.java b/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedService.java deleted file mode 100644 index fa30ee8ea5..0000000000 --- a/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedService.java +++ /dev/null @@ -1,265 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.submit.lookup; - -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.List; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; - -import gr.ekt.bte.core.Record; -import org.apache.commons.lang3.StringUtils; -import org.apache.http.HttpException; -import org.apache.http.HttpResponse; -import org.apache.http.HttpStatus; -import org.apache.http.StatusLine; -import org.apache.http.client.HttpClient; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.utils.URIBuilder; -import 
org.apache.http.impl.client.DefaultHttpClient; -import org.apache.http.params.CoreConnectionPNames; -import org.apache.logging.log4j.Logger; -import org.dspace.app.util.XMLUtils; -import org.dspace.core.ConfigurationManager; -import org.w3c.dom.Document; -import org.w3c.dom.Element; -import org.xml.sax.SAXException; - -/** - * @author Andrea Bollini - * @author Kostas Stamatis - * @author Luigi Andrea Pascarelli - * @author Panagiotis Koutsourakis - */ -public class PubmedService { - - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(PubmedService.class); - - protected int timeout = 1000; - - public void setTimeout(int timeout) { - this.timeout = timeout; - } - - public Record getByPubmedID(String pubmedid) throws HttpException, - IOException, ParserConfigurationException, SAXException { - List ids = new ArrayList(); - ids.add(pubmedid.trim()); - List items = getByPubmedIDs(ids); - if (items != null && items.size() > 0) { - return items.get(0); - } - return null; - } - - public List search(String title, String author, int year) - throws HttpException, IOException { - StringBuffer query = new StringBuffer(); - if (StringUtils.isNotBlank(title)) { - query.append("((").append(title).append("[TI]) OR ("); - // [TI] does not always work, book chapter title - query.append("(").append(title).append("[book]))"); - } - if (StringUtils.isNotBlank(author)) { - // [FAU] - if (query.length() > 0) { - query.append(" AND "); - } - query.append("(").append(author).append("[AU])"); - } - if (year != -1) { - // [DP] - if (query.length() > 0) { - query.append(" AND "); - } - query.append(year).append("[DP]"); - } - return search(query.toString()); - } - - public List search(String query) throws IOException, HttpException { - List results = new ArrayList<>(); - if (!ConfigurationManager.getBooleanProperty(SubmissionLookupService.CFG_MODULE, "remoteservice.demo")) { - HttpGet method = null; - try { - HttpClient client = new DefaultHttpClient(); - client.getParams().setIntParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, timeout); - - URIBuilder uriBuilder = new URIBuilder( - "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi"); - uriBuilder.addParameter("db", "pubmed"); - uriBuilder.addParameter("datetype", "edat"); - uriBuilder.addParameter("retmax", "10"); - uriBuilder.addParameter("term", query); - method = new HttpGet(uriBuilder.build()); - - // Execute the method. 
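[Editor's example] For orientation while reading the removed PubmedService above: its search(title, author, year) method assembles an NCBI esearch term from PubMed field tags ([TI] title, [AU] author, [DP] date of publication, plus a [book] fallback for chapter titles that [TI] misses). A simplified restatement of that string building, wrapped in a hypothetical helper class:

import org.apache.commons.lang3.StringUtils;

final class PubmedTermSketch {
    private PubmedTermSketch() { }

    static String build(String title, String author, int year) {
        StringBuilder query = new StringBuilder();
        if (StringUtils.isNotBlank(title)) {
            // Title search, ORed with [book] because [TI] misses some book chapter titles
            query.append("((").append(title).append("[TI]) OR (")
                 .append("(").append(title).append("[book]))");
        }
        if (StringUtils.isNotBlank(author)) {
            if (query.length() > 0) {
                query.append(" AND ");
            }
            query.append("(").append(author).append("[AU])");
        }
        if (year != -1) {
            if (query.length() > 0) {
                query.append(" AND ");
            }
            query.append(year).append("[DP]");
        }
        return query.toString();
    }
}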
- HttpResponse response = client.execute(method); - StatusLine statusLine = response.getStatusLine(); - int statusCode = statusLine.getStatusCode(); - - if (statusCode != HttpStatus.SC_OK) { - throw new RuntimeException("WS call failed: " - + statusLine); - } - - DocumentBuilderFactory factory = DocumentBuilderFactory - .newInstance(); - factory.setValidating(false); - factory.setIgnoringComments(true); - factory.setIgnoringElementContentWhitespace(true); - - DocumentBuilder builder; - try { - builder = factory.newDocumentBuilder(); - - Document inDoc = builder.parse(response.getEntity().getContent()); - - Element xmlRoot = inDoc.getDocumentElement(); - Element idList = XMLUtils.getSingleElement(xmlRoot, - "IdList"); - List pubmedIDs = XMLUtils.getElementValueList( - idList, "Id"); - results = getByPubmedIDs(pubmedIDs); - } catch (ParserConfigurationException e1) { - log.error(e1.getMessage(), e1); - } catch (SAXException e1) { - log.error(e1.getMessage(), e1); - } - } catch (Exception e1) { - log.error(e1.getMessage(), e1); - } finally { - if (method != null) { - method.releaseConnection(); - } - } - } else { - InputStream stream = null; - try { - File file = new File( - ConfigurationManager.getProperty("dspace.dir") - + "/config/crosswalks/demo/pubmed-search.xml"); - stream = new FileInputStream(file); - DocumentBuilderFactory factory = DocumentBuilderFactory - .newInstance(); - factory.setValidating(false); - factory.setIgnoringComments(true); - factory.setIgnoringElementContentWhitespace(true); - - DocumentBuilder builder = factory.newDocumentBuilder(); - Document inDoc = builder.parse(stream); - - Element xmlRoot = inDoc.getDocumentElement(); - Element idList = XMLUtils.getSingleElement(xmlRoot, "IdList"); - List pubmedIDs = XMLUtils.getElementValueList(idList, - "Id"); - results = getByPubmedIDs(pubmedIDs); - } catch (Exception e) { - throw new RuntimeException(e.getMessage(), e); - } finally { - if (stream != null) { - try { - stream.close(); - } catch (IOException e) { - e.printStackTrace(); - } - } - } - } - return results; - } - - public List getByPubmedIDs(List pubmedIDs) - throws HttpException, IOException, ParserConfigurationException, - SAXException { - List results = new ArrayList(); - HttpGet method = null; - try { - HttpClient client = new DefaultHttpClient(); - client.getParams().setIntParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, 5 * timeout); - - try { - URIBuilder uriBuilder = new URIBuilder( - "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi"); - uriBuilder.addParameter("db", "pubmed"); - uriBuilder.addParameter("retmode", "xml"); - uriBuilder.addParameter("rettype", "full"); - uriBuilder.addParameter("id", StringUtils.join( - pubmedIDs.iterator(), ",")); - method = new HttpGet(uriBuilder.build()); - } catch (URISyntaxException ex) { - throw new RuntimeException("Request not sent", ex); - } - - // Execute the method. 
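[Editor's example] The same removed class batch-fetches full records in getByPubmedIDs(), joining every PubMed ID into one comma-separated id parameter for the efetch endpoint. A sketch of just that request construction, using the same URIBuilder and HttpGet types as the deleted code; the wrapper class name is invented:

import java.net.URISyntaxException;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.utils.URIBuilder;

final class EfetchRequestSketch {
    private EfetchRequestSketch() { }

    static HttpGet build(List<String> pubmedIDs) throws URISyntaxException {
        URIBuilder uriBuilder =
                new URIBuilder("https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi");
        uriBuilder.addParameter("db", "pubmed");
        uriBuilder.addParameter("retmode", "xml");
        uriBuilder.addParameter("rettype", "full");
        // One round trip for the whole batch: "id" takes a comma-separated list of PubMed IDs
        uriBuilder.addParameter("id", StringUtils.join(pubmedIDs.iterator(), ","));
        return new HttpGet(uriBuilder.build());
    }
}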
- HttpResponse response = client.execute(method); - StatusLine statusLine = response.getStatusLine(); - int statusCode = statusLine.getStatusCode(); - - if (statusCode != HttpStatus.SC_OK) { - throw new RuntimeException("WS call failed: " + statusLine); - } - - DocumentBuilderFactory factory = DocumentBuilderFactory - .newInstance(); - factory.setValidating(false); - factory.setIgnoringComments(true); - factory.setIgnoringElementContentWhitespace(true); - - DocumentBuilder builder = factory.newDocumentBuilder(); - Document inDoc = builder - .parse(response.getEntity().getContent()); - - Element xmlRoot = inDoc.getDocumentElement(); - List pubArticles = XMLUtils.getElementList(xmlRoot, - "PubmedArticle"); - - for (Element xmlArticle : pubArticles) { - Record pubmedItem = null; - try { - pubmedItem = PubmedUtils - .convertPubmedDomToRecord(xmlArticle); - results.add(pubmedItem); - } catch (Exception e) { - throw new RuntimeException( - "PubmedID is not valid or not exist: " - + e.getMessage(), e); - } - } - - return results; - } finally { - if (method != null) { - method.releaseConnection(); - } - } - } - - public List search(String doi, String pmid) throws HttpException, - IOException { - StringBuffer query = new StringBuffer(); - if (StringUtils.isNotBlank(doi)) { - query.append(doi); - query.append("[AID]"); - } - if (StringUtils.isNotBlank(pmid)) { - // [FAU] - if (query.length() > 0) { - query.append(" OR "); - } - query.append(pmid).append("[PMID]"); - } - return search(query.toString()); - } -} diff --git a/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedUtils.java b/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedUtils.java deleted file mode 100644 index bca34de295..0000000000 --- a/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedUtils.java +++ /dev/null @@ -1,316 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -/** - * - */ -package org.dspace.submit.lookup; - -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -import gr.ekt.bte.core.MutableRecord; -import gr.ekt.bte.core.Record; -import gr.ekt.bte.core.StringValue; -import gr.ekt.bte.core.Value; -import org.apache.commons.lang3.StringUtils; -import org.dspace.app.util.XMLUtils; -import org.dspace.submit.util.SubmissionLookupPublication; -import org.w3c.dom.Element; - -/** - * @author Andrea Bollini - * @author Kostas Stamatis - * @author Luigi Andrea Pascarelli - * @author Panagiotis Koutsourakis - */ -public class PubmedUtils { - - /** - * Default constructor - */ - private PubmedUtils() { } - - public static Record convertPubmedDomToRecord(Element pubArticle) { - MutableRecord record = new SubmissionLookupPublication(""); - - Map monthToNum = new HashMap(); - monthToNum.put("Jan", "01"); - monthToNum.put("Feb", "02"); - monthToNum.put("Mar", "03"); - monthToNum.put("Apr", "04"); - monthToNum.put("May", "05"); - monthToNum.put("Jun", "06"); - monthToNum.put("Jul", "07"); - monthToNum.put("Aug", "08"); - monthToNum.put("Sep", "09"); - monthToNum.put("Oct", "10"); - monthToNum.put("Nov", "11"); - monthToNum.put("Dec", "12"); - - Element medline = XMLUtils.getSingleElement(pubArticle, - "MedlineCitation"); - - Element article = XMLUtils.getSingleElement(medline, "Article"); - Element pubmed = XMLUtils.getSingleElement(pubArticle, "PubmedData"); - - Element identifierList = 
XMLUtils.getSingleElement(pubmed, - "ArticleIdList"); - if (identifierList != null) { - List identifiers = XMLUtils.getElementList(identifierList, - "ArticleId"); - if (identifiers != null) { - for (Element id : identifiers) { - if ("pubmed".equals(id.getAttribute("IdType"))) { - String pubmedID = id.getTextContent().trim(); - if (pubmedID != null) { - record.addValue("pubmedID", new StringValue( - pubmedID)); - } - } else if ("doi".equals(id.getAttribute("IdType"))) { - String doi = id.getTextContent().trim(); - if (doi != null) { - record.addValue("doi", new StringValue(doi)); - } - } - } - } - } - - String status = XMLUtils.getElementValue(pubmed, "PublicationStatus"); - if (status != null) { - record.addValue("publicationStatus", new StringValue(status)); - } - - String pubblicationModel = XMLUtils.getElementAttribute(medline, - "Article", "PubModel"); - if (pubblicationModel != null) { - record.addValue("pubModel", new StringValue( - pubblicationModel)); - } - - String title = XMLUtils.getElementValue(article, "ArticleTitle"); - if (title != null) { - record.addValue("articleTitle", new StringValue(title)); - } - - Element abstractElement = XMLUtils - .getSingleElement(article, "Abstract"); - if (abstractElement == null) { - abstractElement = XMLUtils.getSingleElement(medline, - "OtherAbstract"); - } - if (abstractElement != null) { - String summary = XMLUtils.getElementValue(abstractElement, - "AbstractText"); - if (summary != null) { - record.addValue("abstractText", new StringValue(summary)); - } - } - - List authors = new LinkedList(); - Element authorList = XMLUtils.getSingleElement(article, "AuthorList"); - if (authorList != null) { - List authorsElement = XMLUtils.getElementList(authorList, - "Author"); - if (authorsElement != null) { - for (Element author : authorsElement) { - if (StringUtils.isBlank(XMLUtils.getElementValue(author, - "CollectiveName"))) { - authors.add(new String[] { - XMLUtils.getElementValue(author, "ForeName"), - XMLUtils.getElementValue(author, "LastName")}); - } - } - } - } - if (authors.size() > 0) { - List values = new LinkedList(); - for (String[] sArray : authors) { - values.add(new StringValue(sArray[1] + ", " + sArray[0])); - } - record.addField("author", values); - } - - Element journal = XMLUtils.getSingleElement(article, "Journal"); - if (journal != null) { - List jnumbers = XMLUtils.getElementList(journal, "ISSN"); - if (jnumbers != null) { - for (Element jnumber : jnumbers) { - if ("Print".equals(jnumber.getAttribute("IssnType"))) { - String issn = jnumber.getTextContent().trim(); - if (issn != null) { - record.addValue("printISSN", new StringValue(issn)); - } - } else { - String eissn = jnumber.getTextContent().trim(); - if (eissn != null) { - record.addValue("electronicISSN", new StringValue(eissn)); - } - } - } - } - - String journalTitle = XMLUtils.getElementValue(journal, "Title"); - if (journalTitle != null) { - record.addValue("journalTitle", new StringValue(journalTitle)); - } - - Element journalIssueElement = XMLUtils.getSingleElement(journal, - "JournalIssue"); - if (journalIssueElement != null) { - String volume = XMLUtils.getElementValue(journalIssueElement, - "Volume"); - if (volume != null) { - record.addValue("journalVolume", new StringValue(volume)); - } - - String issue = XMLUtils.getElementValue(journalIssueElement, - "Issue"); - if (issue != null) { - record.addValue("journalIssue", new StringValue(issue)); - } - - Element pubDateElement = XMLUtils.getSingleElement( - journalIssueElement, "PubDate"); - - String pubDate = 
null; - if (pubDateElement != null) { - pubDate = XMLUtils.getElementValue(pubDateElement, "Year"); - - String mounth = XMLUtils.getElementValue(pubDateElement, - "Month"); - String day = XMLUtils - .getElementValue(pubDateElement, "Day"); - if (StringUtils.isNotBlank(mounth) - && monthToNum.containsKey(mounth)) { - pubDate += "-" + monthToNum.get(mounth); - if (StringUtils.isNotBlank(day)) { - pubDate += "-" + (day.length() == 1 ? "0" + day : day); - } - } - } - if (pubDate == null) { - pubDate = XMLUtils.getElementValue(pubDateElement, "MedlineDate"); - } - if (pubDate != null) { - record.addValue("pubDate", new StringValue(pubDate)); - } - } - - String language = XMLUtils.getElementValue(article, "Language"); - if (language != null) { - record.addValue("language", new StringValue(language)); - } - - List type = new LinkedList(); - Element publicationTypeList = XMLUtils.getSingleElement(article, - "PublicationTypeList"); - if (publicationTypeList != null) { - List publicationTypes = XMLUtils.getElementList( - publicationTypeList, "PublicationType"); - for (Element publicationType : publicationTypes) { - type.add(publicationType.getTextContent().trim()); - } - } - if (type.size() > 0) { - List values = new LinkedList(); - for (String s : type) { - values.add(new StringValue(s)); - } - record.addField("publicationType", values); - } - - List primaryKeywords = new LinkedList(); - List secondaryKeywords = new LinkedList(); - Element keywordsList = XMLUtils.getSingleElement(medline, - "KeywordList"); - if (keywordsList != null) { - List keywords = XMLUtils.getElementList(keywordsList, - "Keyword"); - for (Element keyword : keywords) { - if ("Y".equals(keyword.getAttribute("MajorTopicYN"))) { - primaryKeywords.add(keyword.getTextContent().trim()); - } else { - secondaryKeywords.add(keyword.getTextContent().trim()); - } - } - } - if (primaryKeywords.size() > 0) { - List values = new LinkedList(); - for (String s : primaryKeywords) { - values.add(new StringValue(s)); - } - record.addField("primaryKeyword", values); - } - if (secondaryKeywords.size() > 0) { - List values = new LinkedList(); - for (String s : secondaryKeywords) { - values.add(new StringValue(s)); - } - record.addField("secondaryKeyword", values); - } - - List primaryMeshHeadings = new LinkedList(); - List secondaryMeshHeadings = new LinkedList(); - Element meshHeadingsList = XMLUtils.getSingleElement(medline, - "MeshHeadingList"); - if (meshHeadingsList != null) { - List meshHeadings = XMLUtils.getElementList( - meshHeadingsList, "MeshHeading"); - for (Element meshHeading : meshHeadings) { - if ("Y".equals(XMLUtils.getElementAttribute(meshHeading, - "DescriptorName", "MajorTopicYN"))) { - primaryMeshHeadings.add(XMLUtils.getElementValue( - meshHeading, "DescriptorName")); - } else { - secondaryMeshHeadings.add(XMLUtils.getElementValue( - meshHeading, "DescriptorName")); - } - } - } - if (primaryMeshHeadings.size() > 0) { - List values = new LinkedList(); - for (String s : primaryMeshHeadings) { - values.add(new StringValue(s)); - } - record.addField("primaryMeshHeading", values); - } - if (secondaryMeshHeadings.size() > 0) { - List values = new LinkedList(); - for (String s : secondaryMeshHeadings) { - values.add(new StringValue(s)); - } - record.addField("secondaryMeshHeading", values); - } - - Element paginationElement = XMLUtils.getSingleElement(article, - "Pagination"); - if (paginationElement != null) { - String startPage = XMLUtils.getElementValue(paginationElement, - "StartPage"); - String endPage = 
XMLUtils.getElementValue(paginationElement, - "EndPage"); - if (StringUtils.isBlank(startPage)) { - startPage = XMLUtils.getElementValue(paginationElement, - "MedlinePgn"); - } - - if (startPage != null) { - record.addValue("startPage", new StringValue(startPage)); - } - if (endPage != null) { - record.addValue("endPage", new StringValue(endPage)); - } - } - } - - return record; - } -} diff --git a/dspace-api/src/main/java/org/dspace/versioning/Version.java b/dspace-api/src/main/java/org/dspace/versioning/Version.java index a926fba0f8..2d4d359545 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/Version.java +++ b/dspace-api/src/main/java/org/dspace/versioning/Version.java @@ -135,12 +135,12 @@ public class Version implements ReloadableEntity { return true; } Class objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(o); - if (getClass() != objClass) { + if (!getClass().equals(objClass)) { return false; } final Version that = (Version) o; - if (this.getID() != that.getID()) { + if (!this.getID().equals(that.getID())) { return false; } diff --git a/dspace-api/src/main/java/org/dspace/versioning/VersionHistory.java b/dspace-api/src/main/java/org/dspace/versioning/VersionHistory.java index 0f5b9384bd..1acacc7838 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/VersionHistory.java +++ b/dspace-api/src/main/java/org/dspace/versioning/VersionHistory.java @@ -93,12 +93,12 @@ public class VersionHistory implements ReloadableEntity { return true; } Class objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(o); - if (getClass() != objClass) { + if (!getClass().equals(objClass)) { return false; } final VersionHistory that = (VersionHistory) o; - if (this.getID() != that.getID()) { + if (!this.getID().equals(that.getID())) { return false; } diff --git a/dspace-api/src/main/java/org/dspace/workflow/WorkflowService.java b/dspace-api/src/main/java/org/dspace/workflow/WorkflowService.java index 53da1660db..ced074d71d 100644 --- a/dspace-api/src/main/java/org/dspace/workflow/WorkflowService.java +++ b/dspace-api/src/main/java/org/dspace/workflow/WorkflowService.java @@ -80,6 +80,20 @@ public interface WorkflowService { */ public WorkspaceItem abort(Context c, T wi, EPerson e) throws SQLException, AuthorizeException, IOException; + /** + * Deletes workflow task item in correct order. + * + * @param c The relevant DSpace Context. + * @param wi The WorkflowItem that shall be deleted. + * @param e Admin that deletes this workflow task and item (for logging + * @throws SQLException An exception that provides information on a database access error or other errors. + * @throws AuthorizeException Exception indicating the current user of the context does not have permission + * to perform a particular action. + * @throws IOException A general class of exceptions produced by failed or interrupted I/O operations. 
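[Editor's example] The Version and VersionHistory equals() hunks above replace '!=' on the IDs with a value comparison. Because getID() returns a boxed Integer there, '!=' compares object identity and only coincidentally works for values inside the autobox cache. A tiny illustration; the ID values are arbitrary:

public class BoxedIdComparison {
    public static void main(String[] args) {
        Integer idA = 1000; // outside the default Integer autobox cache (-128..127)
        Integer idB = 1000;
        // '!=' compares references, so two equal IDs usually look "different" here
        System.out.println(idA != idB);        // true on a default JVM
        // equals() compares the underlying int values, which is what the fix relies on
        System.out.println(!idA.equals(idB));  // false
    }
}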
+ */ + public void deleteWorkflowByWorkflowItem(Context c, T wi, EPerson e) + throws SQLException, AuthorizeException, IOException; + public WorkspaceItem sendWorkflowItemBackSubmission(Context c, T workflowItem, EPerson e, String provenance, String rejection_message) throws SQLException, AuthorizeException, IOException; @@ -94,5 +108,19 @@ public interface WorkflowService { public Group getWorkflowRoleGroup(Context context, Collection collection, String roleName, Group roleGroup) throws SQLException, IOException, WorkflowConfigurationException, AuthorizeException, WorkflowException; + /** + * This method will create the workflowRoleGroup for a collection and the given rolename + * @param context The relevant DSpace context + * @param collection The collection + * @param roleName The rolename + * @return The created Group + * @throws AuthorizeException If something goes wrong + * @throws SQLException If something goes wrong + * @throws IOException If something goes wrong + * @throws WorkflowConfigurationException If something goes wrong + */ + public Group createWorkflowRoleGroup(Context context, Collection collection, String roleName) + throws AuthorizeException, SQLException, IOException, WorkflowConfigurationException; + public List getFlywayMigrationLocations(); } diff --git a/dspace-api/src/main/java/org/dspace/workflowbasic/BasicWorkflowServiceImpl.java b/dspace-api/src/main/java/org/dspace/workflowbasic/BasicWorkflowServiceImpl.java index 58f393804a..f97e5d9e4a 100644 --- a/dspace-api/src/main/java/org/dspace/workflowbasic/BasicWorkflowServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/workflowbasic/BasicWorkflowServiceImpl.java @@ -798,33 +798,37 @@ public class BasicWorkflowServiceImpl implements BasicWorkflowService { try { // Get submitter EPerson ep = item.getSubmitter(); - // Get the Locale - Locale supportedLocale = I18nUtil.getEPersonLocale(ep); - Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "submit_archive")); - // Get the item handle to email to user - String handle = handleService.findHandle(context, item); + // send the notification to the submitter unless the submitter eperson has been deleted + if (ep != null) { + // Get the Locale + Locale supportedLocale = I18nUtil.getEPersonLocale(ep); + Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "submit_archive")); - // Get title - String title = item.getName(); - if (StringUtils.isBlank(title)) { - try { - title = I18nUtil.getMessage("org.dspace.workflow.WorkflowManager.untitled"); - } catch (MissingResourceException e) { - title = "Untitled"; + // Get the item handle to email to user + String handle = handleService.findHandle(context, item); + + // Get title + String title = item.getName(); + if (StringUtils.isBlank(title)) { + try { + title = I18nUtil.getMessage("org.dspace.workflow.WorkflowManager.untitled"); + } catch (MissingResourceException e) { + title = "Untitled"; + } } + + email.addRecipient(ep.getEmail()); + email.addArgument(title); + email.addArgument(coll.getName()); + email.addArgument(handleService.getCanonicalForm(handle)); + + email.send(); } - - email.addRecipient(ep.getEmail()); - email.addArgument(title); - email.addArgument(coll.getName()); - email.addArgument(handleService.getCanonicalForm(handle)); - - email.send(); } catch (MessagingException e) { log.warn(LogManager.getHeader(context, "notifyOfArchive", - "cannot email user; item_id=" + item.getID() - + ": " + e.getMessage())); + "cannot email user; item_id=" + item.getID() + + ": " + 
e.getMessage())); } } @@ -866,6 +870,22 @@ public class BasicWorkflowServiceImpl implements BasicWorkflowService { return workspaceItem; } + @Override + public void deleteWorkflowByWorkflowItem(Context context, BasicWorkflowItem wi, EPerson e) + throws SQLException, AuthorizeException, IOException { + Item myitem = wi.getItem(); + UUID itemID = myitem.getID(); + Integer workflowID = wi.getID(); + UUID collID = wi.getCollection().getID(); + // stop workflow + taskListItemService.deleteByWorkflowItem(context, wi); + // Now remove the workflow object manually from the database + workflowItemService.deleteWrapper(context, wi); + // Now delete the item + itemService.delete(context, myitem); + log.info(LogManager.getHeader(context, "delete_workflow", String.format("workflow_item_id=%s " + + "item_id=%s collection_id=%s eperson_id=%s", workflowID, itemID, collID, e.getID()))); + } @Override public WorkspaceItem sendWorkflowItemBackSubmission(Context context, BasicWorkflowItem workflowItem, @@ -1047,25 +1067,30 @@ public class BasicWorkflowServiceImpl implements BasicWorkflowService { protected void notifyOfReject(Context context, BasicWorkflowItem workflowItem, EPerson e, String reason) { try { - // Get the item title - String title = getItemTitle(workflowItem); + // Get submitter + EPerson ep = workflowItem.getSubmitter(); + // send the notification only if the person was not deleted in the meantime + if (ep != null) { + // Get the item title + String title = getItemTitle(workflowItem); - // Get the collection - Collection coll = workflowItem.getCollection(); + // Get the collection + Collection coll = workflowItem.getCollection(); - // Get rejector's name - String rejector = getEPersonName(e); - Locale supportedLocale = I18nUtil.getEPersonLocale(e); - Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "submit_reject")); + // Get rejector's name + String rejector = getEPersonName(e); + Locale supportedLocale = I18nUtil.getEPersonLocale(e); + Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "submit_reject")); - email.addRecipient(workflowItem.getSubmitter().getEmail()); - email.addArgument(title); - email.addArgument(coll.getName()); - email.addArgument(rejector); - email.addArgument(reason); - email.addArgument(getMyDSpaceLink()); + email.addRecipient(ep.getEmail()); + email.addArgument(title); + email.addArgument(coll.getName()); + email.addArgument(rejector); + email.addArgument(reason); + email.addArgument(getMyDSpaceLink()); - email.send(); + email.send(); + } } catch (RuntimeException re) { // log this email error log.warn(LogManager.getHeader(context, "notify_of_reject", @@ -1101,7 +1126,9 @@ public class BasicWorkflowServiceImpl implements BasicWorkflowService { @Override public String getSubmitterName(BasicWorkflowItem wi) throws SQLException { EPerson e = wi.getSubmitter(); - + if (e == null) { + return null; + } return getEPersonName(e); } @@ -1216,6 +1243,12 @@ public class BasicWorkflowServiceImpl implements BasicWorkflowService { return roleGroup; } + @Override + public Group createWorkflowRoleGroup(Context context, Collection collection, String roleName) + throws AuthorizeException, SQLException { + return getWorkflowRoleGroup(context, collection, roleName, null); + } + @Override public List getFlywayMigrationLocations() { return Collections.emptyList(); diff --git a/dspace-api/src/main/java/org/dspace/workflowbasic/TaskListItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/workflowbasic/TaskListItemServiceImpl.java index 
d064600191..ba55ca9460 100644 --- a/dspace-api/src/main/java/org/dspace/workflowbasic/TaskListItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/workflowbasic/TaskListItemServiceImpl.java @@ -46,6 +46,17 @@ public class TaskListItemServiceImpl implements TaskListItemService { taskListItemDAO.deleteByWorkflowItem(context, workflowItem); } + @Override + public void deleteByWorkflowItemAndEPerson(Context context, BasicWorkflowItem workflowItem, EPerson ePerson) + throws SQLException { + taskListItemDAO.deleteByWorkflowItemAndEPerson(context, workflowItem, ePerson); + } + + @Override + public void deleteByEPerson(Context context, EPerson ePerson) throws SQLException { + taskListItemDAO.deleteByEPerson(context, ePerson); + } + @Override public void update(Context context, TaskListItem taskListItem) throws SQLException { taskListItemDAO.save(context, taskListItem); diff --git a/dspace-api/src/main/java/org/dspace/workflowbasic/dao/TaskListItemDAO.java b/dspace-api/src/main/java/org/dspace/workflowbasic/dao/TaskListItemDAO.java index b09cac72e0..5cdf3e0611 100644 --- a/dspace-api/src/main/java/org/dspace/workflowbasic/dao/TaskListItemDAO.java +++ b/dspace-api/src/main/java/org/dspace/workflowbasic/dao/TaskListItemDAO.java @@ -28,5 +28,10 @@ public interface TaskListItemDAO extends GenericDAO { public void deleteByWorkflowItem(Context context, BasicWorkflowItem workflowItem) throws SQLException; + public void deleteByWorkflowItemAndEPerson(Context context, BasicWorkflowItem workflowItem, EPerson ePerson) + throws SQLException; + + public void deleteByEPerson(Context context, EPerson ePerson) throws SQLException; + public List findByEPerson(Context context, EPerson ePerson) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/workflowbasic/dao/impl/TaskListItemDAOImpl.java b/dspace-api/src/main/java/org/dspace/workflowbasic/dao/impl/TaskListItemDAOImpl.java index ec92faec03..2f6448fb4e 100644 --- a/dspace-api/src/main/java/org/dspace/workflowbasic/dao/impl/TaskListItemDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/workflowbasic/dao/impl/TaskListItemDAOImpl.java @@ -42,6 +42,24 @@ public class TaskListItemDAOImpl extends AbstractHibernateDAO impl query.executeUpdate(); } + @Override + public void deleteByWorkflowItemAndEPerson(Context context, BasicWorkflowItem workflowItem, EPerson ePerson) + throws SQLException { + String queryString = "delete from TaskListItem where workflowItem = :workflowItem AND ePerson = :ePerson"; + Query query = createQuery(context, queryString); + query.setParameter("workflowItem", workflowItem); + query.setParameter("ePerson", ePerson); + query.executeUpdate(); + } + + @Override + public void deleteByEPerson(Context context, EPerson ePerson) throws SQLException { + String queryString = "delete from TaskListItem where ePerson = :ePerson"; + Query query = createQuery(context, queryString); + query.setParameter("ePerson", ePerson); + query.executeUpdate(); + } + @Override public List findByEPerson(Context context, EPerson ePerson) throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); diff --git a/dspace-api/src/main/java/org/dspace/workflowbasic/service/TaskListItemService.java b/dspace-api/src/main/java/org/dspace/workflowbasic/service/TaskListItemService.java index 4ce605f87f..3a8aac65fe 100644 --- a/dspace-api/src/main/java/org/dspace/workflowbasic/service/TaskListItemService.java +++ b/dspace-api/src/main/java/org/dspace/workflowbasic/service/TaskListItemService.java @@ -28,6 +28,11 @@ public interface 
TaskListItemService { public void deleteByWorkflowItem(Context context, BasicWorkflowItem workflowItem) throws SQLException; + public void deleteByWorkflowItemAndEPerson(Context context, BasicWorkflowItem workflowItem, EPerson ePerson) + throws SQLException; + + public void deleteByEPerson(Context context, EPerson ePerson) throws SQLException; + public void update(Context context, TaskListItem taskListItem) throws SQLException; public List findByEPerson(Context context, EPerson ePerson) throws SQLException; diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowFactoryImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowFactoryImpl.java index ffc62dcddb..4150d84d04 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowFactoryImpl.java @@ -97,7 +97,7 @@ public class XmlWorkflowFactoryImpl implements XmlWorkflowFactory { } @Override - public List getCollectionHandlesMappedToWorklow(Context context, String workflowName) { + public List getCollectionHandlesMappedToWorkflow(Context context, String workflowName) { List collectionsMapped = new ArrayList<>(); for (String handle : this.workflowMapping.keySet()) { if (this.workflowMapping.get(handle).getID().equals(workflowName)) { @@ -107,7 +107,7 @@ public class XmlWorkflowFactoryImpl implements XmlWorkflowFactory { collectionsMapped.add(collection); } } catch (SQLException e) { - log.error("SQLException in XmlWorkflowFactoryImpl.getCollectionHandlesMappedToWorklow trying to " + + log.error("SQLException in XmlWorkflowFactoryImpl.getCollectionHandlesMappedToWorkflow trying to " + "retrieve collection with handle: " + handle, e); } } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java index c26864417d..285a219cfc 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java @@ -157,22 +157,6 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService { Role role = WorkflowUtils.getCollectionAndRepositoryRoles(collection).get(roleName); if (role.getScope() == Role.Scope.COLLECTION || role.getScope() == Role.Scope.REPOSITORY) { roleGroup = WorkflowUtils.getRoleGroup(context, collection, role); - if (roleGroup == null) { - authorizeService.authorizeAction(context, collection, Constants.WRITE); - roleGroup = groupService.create(context); - if (role.getScope() == Role.Scope.COLLECTION) { - groupService.setName(roleGroup, - "COLLECTION_" + collection.getID().toString() - + "_WORKFLOW_ROLE_" + roleName); - } else { - groupService.setName(roleGroup, role.getName()); - } - groupService.update(context, roleGroup); - authorizeService.addPolicy(context, collection, Constants.ADD, roleGroup); - if (role.getScope() == Role.Scope.COLLECTION) { - WorkflowUtils.createCollectionWorkflowRole(context, collection, roleName, roleGroup); - } - } } return roleGroup; } catch (WorkflowConfigurationException e) { @@ -180,9 +164,31 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService { } } + @Override + public Group createWorkflowRoleGroup(Context context, Collection collection, String roleName) + throws AuthorizeException, SQLException, IOException, WorkflowConfigurationException { + Group roleGroup; + authorizeService.authorizeAction(context, collection, Constants.WRITE); + roleGroup = 
groupService.create(context); + Role role = WorkflowUtils.getCollectionAndRepositoryRoles(collection).get(roleName); + if (role.getScope() == Role.Scope.COLLECTION) { + groupService.setName(roleGroup, + "COLLECTION_" + collection.getID().toString() + + "_WORKFLOW_ROLE_" + roleName); + } else { + groupService.setName(roleGroup, role.getName()); + } + groupService.update(context, roleGroup); + authorizeService.addPolicy(context, collection, Constants.ADD, roleGroup); + if (role.getScope() == Role.Scope.COLLECTION) { + WorkflowUtils.createCollectionWorkflowRole(context, collection, roleName, roleGroup); + } + return roleGroup; + } + @Override public List getFlywayMigrationLocations() { - return Collections.singletonList("classpath:org.dspace.storage.rdbms.xmlworkflow"); + return Collections.singletonList("classpath:org/dspace/storage/rdbms/xmlworkflow"); } @Override @@ -263,19 +269,21 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService { } protected void grantSubmitterReadPolicies(Context context, Item item) throws SQLException, AuthorizeException { - //A list of policies the user has for this item - List userHasPolicies = new ArrayList(); - List itempols = authorizeService.getPolicies(context, item); EPerson submitter = item.getSubmitter(); - for (ResourcePolicy resourcePolicy : itempols) { - if (submitter.equals(resourcePolicy.getEPerson())) { - //The user has already got this policy so add it to the list - userHasPolicies.add(resourcePolicy.getAction()); + if (null != submitter) { + //A list of policies the user has for this item + List userHasPolicies = new ArrayList<>(); + List itempols = authorizeService.getPolicies(context, item); + for (ResourcePolicy resourcePolicy : itempols) { + if (submitter.equals(resourcePolicy.getEPerson())) { + //The user has already got this policy so add it to the list + userHasPolicies.add(resourcePolicy.getAction()); + } + } + //Make sure we don't add duplicate policies + if (!userHasPolicies.contains(Constants.READ)) { + addPolicyToItem(context, item, Constants.READ, submitter, ResourcePolicy.TYPE_SUBMISSION); } - } - //Make sure we don't add duplicate policies - if (!userHasPolicies.contains(Constants.READ)) { - addPolicyToItem(context, item, Constants.READ, submitter, ResourcePolicy.TYPE_SUBMISSION); } } @@ -583,35 +591,38 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService { try { // Get submitter EPerson ep = item.getSubmitter(); - // Get the Locale - Locale supportedLocale = I18nUtil.getEPersonLocale(ep); - Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "submit_archive")); + // send the notification to the submitter unless the submitter eperson has been deleted + if (null != ep) { + // Get the Locale + Locale supportedLocale = I18nUtil.getEPersonLocale(ep); + Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "submit_archive")); - // Get the item handle to email to user - String handle = handleService.findHandle(context, item); + // Get the item handle to email to user + String handle = handleService.findHandle(context, item); - // Get title - List titles = itemService - .getMetadata(item, MetadataSchemaEnum.DC.getName(), "title", null, Item.ANY); - String title = ""; - try { - title = I18nUtil.getMessage("org.dspace.workflow.WorkflowManager.untitled"); - } catch (MissingResourceException e) { - title = "Untitled"; + // Get title + List titles = itemService + .getMetadata(item, MetadataSchemaEnum.DC.getName(), "title", null, Item.ANY); + String title = ""; + try { + title 
= I18nUtil.getMessage("org.dspace.workflow.WorkflowManager.untitled"); + } catch (MissingResourceException e) { + title = "Untitled"; + } + if (titles.size() > 0) { + title = titles.iterator().next().getValue(); + } + + email.addRecipient(ep.getEmail()); + email.addArgument(title); + email.addArgument(coll.getName()); + email.addArgument(handleService.getCanonicalForm(handle)); + + email.send(); } - if (titles.size() > 0) { - title = titles.iterator().next().getValue(); - } - - email.addRecipient(ep.getEmail()); - email.addArgument(title); - email.addArgument(coll.getName()); - email.addArgument(handleService.getCanonicalForm(handle)); - - email.send(); } catch (MessagingException e) { log.warn(LogManager.getHeader(context, "notifyOfArchive", - "cannot email user" + " item_id=" + item.getID())); + "cannot email user" + " item_id=" + item.getID())); } } @@ -819,7 +830,7 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService { } public void removeUserItemPolicies(Context context, Item item, EPerson e) throws SQLException, AuthorizeException { - if (e != null) { + if (e != null && item.getSubmitter() != null) { //Also remove any lingering authorizations from this user authorizeService.removeEPersonPolicies(context, item, e); //Remove the bundle rights @@ -841,7 +852,7 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService { protected void removeGroupItemPolicies(Context context, Item item, Group e) throws SQLException, AuthorizeException { - if (e != null) { + if (e != null && item.getSubmitter() != null) { //Also remove any lingering authorizations from this user authorizeService.removeGroupPolicies(context, item, e); //Remove the bundle rights @@ -857,7 +868,32 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService { } @Override - public WorkspaceItem sendWorkflowItemBackSubmission(Context context, XmlWorkflowItem wi, EPerson e, + public void deleteWorkflowByWorkflowItem(Context context, XmlWorkflowItem wi, EPerson e) + throws SQLException, AuthorizeException, IOException { + Item myitem = wi.getItem(); + UUID itemID = myitem.getID(); + Integer workflowID = wi.getID(); + UUID collID = wi.getCollection().getID(); + // stop workflow + deleteAllTasks(context, wi); + context.turnOffAuthorisationSystem(); + //Also clear all info for this step + workflowRequirementsService.clearInProgressUsers(context, wi); + // Remove (if any) the workflowItemroles for this item + workflowItemRoleService.deleteForWorkflowItem(context, wi); + // Now remove the workflow object manually from the database + xmlWorkflowItemService.deleteWrapper(context, wi); + // Now delete the item + itemService.delete(context, myitem); + log.info(LogManager.getHeader(context, "delete_workflow", "workflow_item_id=" + + workflowID + "item_id=" + itemID + + "collection_id=" + collID + "eperson_id=" + + e.getID())); + context.restoreAuthSystemState(); + } + + @Override + public WorkspaceItem sendWorkflowItemBackSubmission(Context context, XmlWorkflowItem wi, EPerson e, String provenance, String rejection_message) throws SQLException, AuthorizeException, @@ -1032,31 +1068,38 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService { protected void notifyOfReject(Context c, XmlWorkflowItem wi, EPerson e, String reason) { try { - // Get the item title - String title = wi.getItem().getName(); + // send the notification only if the person was not deleted in the + // meantime between submission and archiving. 
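(Illustrative aside, not part of the patch: every notification path touched by this change now guards on the submitter EPerson still existing, because the account may have been deleted between submission and review. A minimal compile-level sketch of that guard, assuming the dspace-api classes are on the classpath; the class and method names here are hypothetical, not DSpace API:)

    import org.dspace.eperson.EPerson;
    import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;

    public class SubmitterGuardSketch {
        /** Returns true when it is safe to e-mail the submitter of the given workflow item. */
        public static boolean canNotifySubmitter(XmlWorkflowItem workflowItem) {
            EPerson submitter = workflowItem.getSubmitter();
            // The submitter account may have been deleted between submission and review.
            return submitter != null && submitter.getEmail() != null;
        }
    }
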
+ EPerson eperson = wi.getSubmitter(); + if (eperson != null) { + // Get the item title + String title = wi.getItem().getName(); - // Get the collection - Collection coll = wi.getCollection(); + // Get the collection + Collection coll = wi.getCollection(); - // Get rejector's name - String rejector = getEPersonName(e); - Locale supportedLocale = I18nUtil.getEPersonLocale(e); - Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "submit_reject")); + // Get rejector's name + String rejector = getEPersonName(e); + Locale supportedLocale = I18nUtil.getEPersonLocale(e); + Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "submit_reject")); - email.addRecipient(wi.getSubmitter().getEmail()); - email.addArgument(title); - email.addArgument(coll.getName()); - email.addArgument(rejector); - email.addArgument(reason); - email.addArgument(ConfigurationManager.getProperty("dspace.ui.url") + "/mydspace"); + email.addRecipient(eperson.getEmail()); + email.addArgument(title); + email.addArgument(coll.getName()); + email.addArgument(rejector); + email.addArgument(reason); + email.addArgument(ConfigurationManager.getProperty("dspace.ui.url") + "/mydspace"); - email.send(); + email.send(); + } else { + // DO nothing + } } catch (Exception ex) { // log this email error log.warn(LogManager.getHeader(c, "notify_of_reject", - "cannot email user" + " eperson_id" + e.getID() - + " eperson_email" + e.getEmail() - + " workflow_item_id" + wi.getID())); + "cannot email user" + " eperson_id" + e.getID() + + " eperson_email" + e.getEmail() + + " workflow_item_id" + wi.getID())); } } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/factory/XmlWorkflowFactory.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/factory/XmlWorkflowFactory.java index 5d33843747..db856bb57b 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/factory/XmlWorkflowFactory.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/factory/XmlWorkflowFactory.java @@ -86,7 +86,7 @@ public interface XmlWorkflowFactory { * @param workflowName Name of workflow we want the collections of that are mapped to is * @return List of collections mapped to the requested workflow */ - public List getCollectionHandlesMappedToWorklow(Context context, String workflowName); + public List getCollectionHandlesMappedToWorkflow(Context context, String workflowName); /** * Returns list of collections that are not mapped to any configured workflow, and thus use the default workflow diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Step.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Step.java index a982107d78..16befc2626 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Step.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Step.java @@ -81,7 +81,7 @@ public class Step implements BeanNameAware { /** * Get the next step based on out the outcome * @param outcome the outcome of the previous step - * @return the next stepp or NULL if there is no step configured for this outcome + * @return the next step or NULL if there is no step configured for this outcome */ public Step getNextStep(int outcome) { return outcomes.get(outcome); diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Workflow.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Workflow.java index a064e27ebf..a31b24a7b6 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Workflow.java +++ 
b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Workflow.java @@ -104,7 +104,7 @@ public class Workflow implements BeanNameAware { Map roles = new HashMap<>(); for (Step step : steps) { if (step.getRole() != null) { - roles.put(step.getRole().getName(), step.getRole()); + roles.put(step.getRole().getId(), step.getRole()); } } return roles; diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/AcceptEditRejectAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/AcceptEditRejectAction.java index cb74bcf22d..743d00b2b6 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/AcceptEditRejectAction.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/AcceptEditRejectAction.java @@ -36,6 +36,7 @@ public class AcceptEditRejectAction extends ProcessingAction { private static final String SUBMIT_APPROVE = "submit_approve"; private static final String SUBMIT_REJECT = "submit_reject"; + private static final String SUBMITTER_IS_DELETED_PAGE = "submitter_deleted"; //TODO: rename to AcceptAndEditMetadataAction @@ -53,6 +54,8 @@ public class AcceptEditRejectAction extends ProcessingAction { return processAccept(c, wfi); case SUBMIT_REJECT: return processRejectPage(c, wfi, request); + case SUBMITTER_IS_DELETED_PAGE: + return processSubmitterIsDeletedPage(c, wfi, request); default: return new ActionResult(ActionResult.TYPE.TYPE_CANCEL); } @@ -93,6 +96,22 @@ public class AcceptEditRejectAction extends ProcessingAction { return new ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE); } + public ActionResult processSubmitterIsDeletedPage(Context c, XmlWorkflowItem wfi, HttpServletRequest request) + throws SQLException, AuthorizeException, IOException { + if (request.getParameter("submit_delete") != null) { + XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService() + .deleteWorkflowByWorkflowItem(c, wfi, c.getCurrentUser()); + // Delete and send user back to myDspace page + return new ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE); + } else if (request.getParameter("submit_keep_it") != null) { + // Do nothing, just send it back to myDspace page + return new ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE); + } else { + //Cancel, go back to the main task page + return new ActionResult(ActionResult.TYPE.TYPE_PAGE); + } + } + private void addApprovedProvenance(Context c, XmlWorkflowItem wfi) throws SQLException, AuthorizeException { //Add the provenance for the accept String now = DCDate.getCurrent().toString(); diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ReviewAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ReviewAction.java index 5630087d57..8474757be6 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ReviewAction.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ReviewAction.java @@ -38,6 +38,8 @@ public class ReviewAction extends ProcessingAction { private static final String SUBMIT_APPROVE = "submit_approve"; private static final String SUBMIT_REJECT = "submit_reject"; + private static final String SUBMITTER_IS_DELETED_PAGE = "submitter_deleted"; + @Override public void activate(Context c, XmlWorkflowItem wfItem) { @@ -53,6 +55,8 @@ public class ReviewAction extends ProcessingAction { return processAccept(c, wfi); case SUBMIT_REJECT: return processRejectPage(c, 
wfi, step, request); + case SUBMITTER_IS_DELETED_PAGE: + return processSubmitterIsDeletedPage(c, wfi, request); default: return new ActionResult(ActionResult.TYPE.TYPE_CANCEL); } @@ -108,4 +112,21 @@ public class ReviewAction extends ProcessingAction { return new ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE); } + + public ActionResult processSubmitterIsDeletedPage(Context c, XmlWorkflowItem wfi, HttpServletRequest request) + throws SQLException, AuthorizeException, IOException { + if (request.getParameter("submit_delete") != null) { + XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService() + .deleteWorkflowByWorkflowItem(c, wfi, c.getCurrentUser()); + // Delete and send user back to myDspace page + return new ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE); + } else if (request.getParameter("submit_keep_it") != null) { + // Do nothing, just send it back to myDspace page + return new ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE); + } else { + //Cancel, go back to the main task page + request.setAttribute("page", MAIN_PAGE); + return new ActionResult(ActionResult.TYPE.TYPE_PAGE); + } + } } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SingleUserReviewAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SingleUserReviewAction.java index d115832389..9ef554821d 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SingleUserReviewAction.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SingleUserReviewAction.java @@ -37,6 +37,7 @@ public class SingleUserReviewAction extends ProcessingAction { public static final int MAIN_PAGE = 0; public static final int REJECT_PAGE = 1; + public static final int SUBMITTER_IS_DELETED_PAGE = 2; public static final int OUTCOME_REJECT = 1; @@ -59,6 +60,8 @@ public class SingleUserReviewAction extends ProcessingAction { return processMainPage(c, wfi, step, request); case REJECT_PAGE: return processRejectPage(c, wfi, step, request); + case SUBMITTER_IS_DELETED_PAGE: + return processSubmitterIsDeletedPage(c, wfi, request); default: return new ActionResult(ActionResult.TYPE.TYPE_CANCEL); } @@ -82,7 +85,11 @@ public class SingleUserReviewAction extends ProcessingAction { return new ActionResult(ActionResult.TYPE.TYPE_OUTCOME, ActionResult.OUTCOME_COMPLETE); } else if (request.getParameter(SUBMIT_REJECT) != null) { // Make sure we indicate which page we want to process - request.setAttribute("page", REJECT_PAGE); + if (wfi.getSubmitter() == null) { + request.setAttribute("page", SUBMITTER_IS_DELETED_PAGE); + } else { + request.setAttribute("page", REJECT_PAGE); + } // We have pressed reject item, so take the user to a page where he can reject return new ActionResult(ActionResult.TYPE.TYPE_PAGE); } else if (request.getParameter(SUBMIT_DECLINE_TASK) != null) { @@ -135,4 +142,21 @@ public class SingleUserReviewAction extends ProcessingAction { return new ActionResult(ActionResult.TYPE.TYPE_PAGE); } } + + public ActionResult processSubmitterIsDeletedPage(Context c, XmlWorkflowItem wfi, HttpServletRequest request) + throws SQLException, AuthorizeException, IOException { + if (request.getParameter("submit_delete") != null) { + XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService() + .deleteWorkflowByWorkflowItem(c, wfi, c.getCurrentUser()); + // Delete and send user back to myDspace page + return new ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE); + } else if 
(request.getParameter("submit_keep_it") != null) { + // Do nothing, just send it back to myDspace page + return new ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE); + } else { + //Cancel, go back to the main task page + request.setAttribute("page", MAIN_PAGE); + return new ActionResult(ActionResult.TYPE.TYPE_PAGE); + } + } } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/AssignOriginalSubmitterAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/AssignOriginalSubmitterAction.java index 01d995ccf6..3c8d85997a 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/AssignOriginalSubmitterAction.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/AssignOriginalSubmitterAction.java @@ -74,20 +74,22 @@ public class AssignOriginalSubmitterAction extends UserSelectionAction { @Override public void alertUsersOnActivation(Context c, XmlWorkflowItem wfi, RoleMembers roleMembers) throws IOException, SQLException { - try { - XmlWorkflowService xmlWorkflowService = XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService(); - xmlWorkflowService.alertUsersOnTaskActivation(c, wfi, "submit_task", Arrays.asList(wfi.getSubmitter()), - //The arguments - wfi.getItem().getName(), - wfi.getCollection().getName(), - wfi.getSubmitter().getFullName(), - //TODO: message - "New task available.", - xmlWorkflowService.getMyDSpaceLink() - ); - } catch (MessagingException e) { - log.info(LogManager.getHeader(c, "error emailing user(s) for claimed task", + if (wfi.getSubmitter() != null) { + try { + XmlWorkflowService xmlWorkflowService = XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService(); + xmlWorkflowService.alertUsersOnTaskActivation(c, wfi, "submit_task", Arrays.asList(wfi.getSubmitter()), + //The arguments + wfi.getItem().getName(), + wfi.getCollection().getName(), + wfi.getSubmitter().getFullName(), + //TODO: message + "New task available.", + xmlWorkflowService.getMyDSpaceLink() + ); + } catch (MessagingException e) { + log.info(LogManager.getHeader(c, "error emailing user(s) for claimed task", "step: " + getParent().getStep().getId() + " workflowitem: " + wfi.getID())); + } } } @@ -107,9 +109,9 @@ public class AssignOriginalSubmitterAction extends UserSelectionAction { .getId() + " to assign a submitter to. 
Aborting the action."); throw new IllegalStateException(); } - - createTaskForEPerson(c, wfi, step, nextAction, submitter); - + if (submitter != null) { + createTaskForEPerson(c, wfi, step, nextAction, submitter); + } //It is important that we return to the submission page since we will continue our actions with the submitter return new ActionResult(ActionResult.TYPE.TYPE_OUTCOME, ActionResult.OUTCOME_COMPLETE); } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/ClaimAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/ClaimAction.java index 78742c6553..36b2aaeee5 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/ClaimAction.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/ClaimAction.java @@ -77,22 +77,25 @@ public class ClaimAction extends UserSelectionAction { public void alertUsersOnActivation(Context c, XmlWorkflowItem wfi, RoleMembers roleMembers) throws IOException, SQLException { try { + EPerson ep = wfi.getSubmitter(); + String submitterName = null; + if (ep != null) { + submitterName = ep.getFullName(); + } XmlWorkflowService xmlWorkflowService = XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService(); xmlWorkflowService.alertUsersOnTaskActivation(c, wfi, "submit_task", roleMembers.getAllUniqueMembers(c), - //The arguments - wfi.getItem().getName(), - wfi.getCollection().getName(), - wfi.getSubmitter().getFullName(), - //TODO: message - "New task available.", - xmlWorkflowService.getMyDSpaceLink() + //The arguments + wfi.getItem().getName(), + wfi.getCollection().getName(), + submitterName, + //TODO: message + "New task available.", + xmlWorkflowService.getMyDSpaceLink() ); } catch (MessagingException e) { log.info(LogManager.getHeader(c, "error emailing user(s) for claimed task", - "step: " + getParent().getStep().getId() + " workflowitem: " + wfi.getID())); + "step: " + getParent().getStep().getId() + " workflowitem: " + wfi.getID())); } - - } @Override diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/PoolTaskServiceImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/PoolTaskServiceImpl.java index 684eb2cd04..f64f1b3942 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/PoolTaskServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/PoolTaskServiceImpl.java @@ -125,11 +125,29 @@ public class PoolTaskServiceImpl implements PoolTaskService { } } + @Override + public void deleteByEperson(Context context, EPerson ePerson) + throws SQLException, AuthorizeException, IOException { + List tasks = findByEperson(context, ePerson); + //Use an iterator to remove the tasks ! 
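(A brief aside on the idiom used in deleteByEperson above: tasks are removed through the Iterator rather than from the list inside a for-each loop, which avoids a ConcurrentModificationException. A standalone sketch of the same pattern, with plain strings standing in for PoolTask objects:)

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Iterator;
    import java.util.List;

    public class IteratorRemovalSketch {
        public static void main(String[] args) {
            List<String> tasks = new ArrayList<>(Arrays.asList("task-1", "task-2", "task-3"));
            Iterator<String> iterator = tasks.iterator();
            while (iterator.hasNext()) {
                String task = iterator.next();
                iterator.remove();                      // remove via the iterator, not the list
                System.out.println("deleted " + task);  // stands in for delete(context, poolTask)
            }
        }
    }
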
+ Iterator iterator = tasks.iterator(); + while (iterator.hasNext()) { + PoolTask poolTask = iterator.next(); + iterator.remove(); + delete(context, poolTask); + } + } + @Override public List findByEPerson(Context context, EPerson ePerson) throws SQLException { return poolTaskDAO.findByEPerson(context, ePerson); } + @Override + public List findByGroup(Context context, Group group) throws SQLException { + return poolTaskDAO.findByGroup(context, group); + } + @Override public PoolTask create(Context context) throws SQLException, AuthorizeException { return poolTaskDAO.create(context, new PoolTask()); diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/WorkflowItemRoleServiceImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/WorkflowItemRoleServiceImpl.java index c96bcd032d..4204c7dcc3 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/WorkflowItemRoleServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/WorkflowItemRoleServiceImpl.java @@ -58,6 +58,16 @@ public class WorkflowItemRoleServiceImpl implements WorkflowItemRoleService { } } + @Override + public void deleteByEPerson(Context context, EPerson ePerson) throws SQLException, AuthorizeException { + Iterator workflowItemRoles = findByEPerson(context, ePerson).iterator(); + while (workflowItemRoles.hasNext()) { + WorkflowItemRole workflowItemRole = workflowItemRoles.next(); + workflowItemRoles.remove(); + delete(context, workflowItemRole); + } + } + @Override public List findByEPerson(Context context, EPerson ePerson) throws SQLException { return workflowItemRoleDAO.findByEPerson(context, ePerson); diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/service/PoolTaskService.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/service/PoolTaskService.java index d0de6bef38..7f5ed5e6a0 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/service/PoolTaskService.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/service/PoolTaskService.java @@ -14,6 +14,7 @@ import java.util.List; import org.dspace.authorize.AuthorizeException; import org.dspace.core.Context; import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; import org.dspace.service.DSpaceCRUDService; import org.dspace.xmlworkflow.storedcomponents.PoolTask; import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; @@ -40,5 +41,16 @@ public interface PoolTaskService extends DSpaceCRUDService { public void deleteByWorkflowItem(Context context, XmlWorkflowItem xmlWorkflowItem) throws SQLException, AuthorizeException; + public void deleteByEperson(Context context, EPerson ePerson) throws SQLException, AuthorizeException, IOException; + public List findByEPerson(Context context, EPerson ePerson) throws SQLException; + + /** + * This method will return a list of PoolTask for the given group + * @param context The relevant DSpace context + * @param group The Group to be searched on + * @return The list of PoolTask objects + * @throws SQLException If something goes wrong + */ + public List findByGroup(Context context, Group group) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/service/WorkflowItemRoleService.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/service/WorkflowItemRoleService.java index 62c661f02a..9f909231f1 100644 --- 
a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/service/WorkflowItemRoleService.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/service/WorkflowItemRoleService.java @@ -33,5 +33,7 @@ public interface WorkflowItemRoleService extends DSpaceCRUDService findByEPerson(Context context, EPerson ePerson) throws SQLException; } diff --git a/dspace-api/src/main/resources/Messages.properties b/dspace-api/src/main/resources/Messages.properties index bf1b475375..974c71083d 100644 --- a/dspace-api/src/main/resources/Messages.properties +++ b/dspace-api/src/main/resources/Messages.properties @@ -6,6 +6,8 @@ # http://www.dspace.org/license/ # +admin.name = DSpace Administrator + browse.page-title = Browsing DSpace browse.et-al = et al @@ -252,6 +254,12 @@ jsp.dspace-admin.eperson-browse.phone = Telephone jsp.dspace-admin.eperson-browse.self = Self Registered jsp.dspace-admin.eperson-browse.title = E-People jsp.dspace-admin.eperson-confirm-delete.confirm = Are you sure this e-person should be deleted? +jsp.dspace-admin.eperson-confirm-delete.confirm.constraint = This EPerson +jsp.dspace-admin.eperson-confirm-delete.confirm.item = has submitted one or more items which will be kept +jsp.dspace-admin.eperson-confirm-delete.confirm.workspaceitem = has unsubmitted workspace items which will be deleted +jsp.dspace-admin.eperson-confirm-delete.confirm.workflowitem = has an active submission workflow which will be put back into the pool +jsp.dspace-admin.eperson-confirm-delete.confirm.resourcepolicy = has resource policies associated with him which will be deleted +jsp.dspace-admin.eperson-confirm-delete.confirm.tasklistitem = has a workflow task awaiting their attention jsp.dspace-admin.eperson-confirm-delete.heading = Delete e-person: {0} ({1}) jsp.dspace-admin.eperson-confirm-delete.title = Delete E-Person jsp.dspace-admin.eperson-deletion-error.errormsg = The EPerson {0} cannot be deleted because a reference to it exists in the following table(s): @@ -576,7 +584,6 @@ jsp.general.without-contributor jsp.general.without-date = No date given jsp.help = jsp.help.formats.contact1 = Please contact your -jsp.help.formats.contact2 = DSpace Administrator jsp.help.formats.contact3 = if you have questions about a particular format. jsp.help.formats.extensions = Extensions jsp.help.formats.here = (Your Site's Format Support Policy Here) @@ -741,6 +748,12 @@ jsp.mydspace.reject-reason.cancel.button = Cancel Rejecti jsp.mydspace.reject-reason.reject.button = Reject Item jsp.mydspace.reject-reason.text1 = Please enter the reason you are rejecting the submission into the box below. Please indicate in your message whether the submitter should fix a problem and resubmit. jsp.mydspace.reject-reason.title = Enter Reason for Rejection +jsp.mydspace.reject-deleted-submitter.title = The Submitter of this item has been deleted +jsp.mydspace.reject-deleted-submitter.message = Do you want to delete the document, keep it as a task and work on it later or cancel the rejection process? +jsp.mydspace.reject-deleted-submitter-keep-it.button = Keep it I will work on it later +jsp.mydspace.reject-deleted-submitter-delete.button = Delete Item +jsp.mydspace.reject-deleted-submitter-delete.title = Delete successfully +jsp.mydspace.reject-deleted-submitter-delete.info = Reviewing task is done, the submitted item has been successfully removed from the system. 
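(Aside on the jsp.mydspace.reject-deleted-submitter.* keys added above: they back the "submitter has been deleted" pages introduced in the review actions. The JSPs typically resolve such keys with fmt:message, but the same lookup can be done from Java through I18nUtil, as in this rough sketch; the class name is hypothetical:)

    import org.dspace.core.I18nUtil;

    public class DeletedSubmitterMessagesSketch {
        public static void main(String[] args) {
            // Resolve two of the new keys for the default locale.
            System.out.println(I18nUtil.getMessage("jsp.mydspace.reject-deleted-submitter.title"));
            System.out.println(I18nUtil.getMessage("jsp.mydspace.reject-deleted-submitter.message"));
        }
    }
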
jsp.mydspace.remove-item.cancel.button = Cancel Removal jsp.mydspace.remove-item.confirmation = Are you sure you want to remove the following incomplete item? jsp.mydspace.remove-item.remove.button = Remove the Item @@ -1643,6 +1656,7 @@ org.dspace.workflow.WorkflowManager.step1 org.dspace.workflow.WorkflowManager.step2 = The submission must be checked before inclusion in the archive. org.dspace.workflow.WorkflowManager.step3 = The metadata needs to be checked to ensure compliance with the collection's standards, and edited if necessary. org.dspace.workflow.WorkflowManager.untitled = Untitled +org.dspace.workflow.WorkflowManager.deleted-submitter = Unknown (deleted submitter) search.order.asc = Ascending search.order.desc = Descending @@ -1824,6 +1838,11 @@ In response to your request I have the pleasure to send you in attachment a copy Best regards,\n\ {3} <{4}> +itemRequest.admin.response.body.approve = Dear {0},\n\ +In response to your request please see the attached copy of the file(s) related to the document: "{2}" ({1}).\n\n\ +Best regards,\n\ +{3} + itemRequest.response.subject.reject = Request copy of document itemRequest.response.body.reject = Dear {0},\n\ In response to your request I regret to inform you that it''s not possible to send you a copy of the file(s) you have requested, concerning the document: "{2}" ({1}), of which I am author (or co-author).\n\n\ @@ -1845,6 +1864,12 @@ itemRequest.response.body.contactRequester = Dear {0},\n\n\ Thanks for your interest! Since the author owns the copyright for this work, I will contact the author and ask permission to send you a copy. I''ll let you know as soon as I hear from the author.\n\n\ Thanks!\n\ {1} <{2}> + +itemRequest.admin.response.body.reject = Dear {0},\n\ +In response to your request I regret to inform you that it''s not possible to send you a copy of the file(s) you have requested, concerning the document: "{2}" ({1}).\n\n\ +Best regards,\n\ +{3} + jsp.request.item.request-form.info2 = Request a document copy: {0} jsp.request.item.request-form.problem = You must fill all the missing fields. jsp.request.item.request-form.reqname = Requester name: @@ -1886,7 +1911,8 @@ jsp.request.item.request-free-acess.free = Change to Open Access jsp.request.item.request-free-acess.name = Name: jsp.request.item.request-free-acess.email = E-mail: org.dspace.app.requestitem.RequestItemMetadataStrategy.unnamed = Corresponding Author -org.dspace.app.requestitem.RequestItemHelpdeskStrategy.helpdeskname = Help Desk +org.dspace.app.requestitem.helpdeskname = Help Desk +org.dspace.app.requestitem.default-author-name = DSpace User org.dspace.app.webui.jsptag.ItemTag.restrict = Request a copy jsp.layout.navbar-admin.batchimport = Batch import diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql new file mode 100644 index 0000000000..7907fccc00 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql @@ -0,0 +1,29 @@ +-- +-- Copyright 2010-2017 Boxfuse GmbH +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. 
+-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +----------------- +-- This is the Oracle upgrade script from Flyway v4.2.0, copied/borrowed from: +-- https://github.com/flyway/flyway/blob/flyway-4.2.0/flyway-core/src/main/resources/org/flywaydb/core/internal/dbsupport/oracle/upgradeMetaDataTable.sql +-- +-- The variables in this script are replaced in FlywayUpgradeUtils.upgradeFlywayTable() +------------------ + +DROP INDEX "${schema}"."${table}_vr_idx"; +DROP INDEX "${schema}"."${table}_ir_idx"; +ALTER TABLE "${schema}"."${table}" DROP COLUMN "version_rank"; +ALTER TABLE "${schema}"."${table}" DROP PRIMARY KEY DROP INDEX; +ALTER TABLE "${schema}"."${table}" MODIFY "version" NULL; +ALTER TABLE "${schema}"."${table}" ADD CONSTRAINT "${table}_pk" PRIMARY KEY ("installed_rank"); +UPDATE "${schema}"."${table}" SET "type"='BASELINE' WHERE "type"='INIT'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql new file mode 100644 index 0000000000..7548fa4c6a --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql @@ -0,0 +1,29 @@ +-- +-- Copyright 2010-2017 Boxfuse GmbH +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +----------------- +-- This is the PostgreSQL upgrade script from Flyway v4.2.0, copied/borrowed from: +-- https://github.com/flyway/flyway/blob/flyway-4.2.0/flyway-core/src/main/resources/org/flywaydb/core/internal/dbsupport/oracle/upgradeMetaDataTable.sql +-- +-- The variables in this script are replaced in FlywayUpgradeUtils.upgradeFlywayTable() +------------------ + +DROP INDEX "${schema}"."${table}_vr_idx"; +DROP INDEX "${schema}"."${table}_ir_idx"; +ALTER TABLE "${schema}"."${table}" DROP COLUMN "version_rank"; +ALTER TABLE "${schema}"."${table}" DROP CONSTRAINT "${table}_pk"; +ALTER TABLE "${schema}"."${table}" ALTER COLUMN "version" DROP NOT NULL; +ALTER TABLE "${schema}"."${table}" ADD CONSTRAINT "${table}_pk" PRIMARY KEY ("installed_rank"); +UPDATE "${schema}"."${table}" SET "type"='BASELINE' WHERE "type"='INIT'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2020.01.08__DS-626-statistics-tracker.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2020.01.08__DS-626-statistics-tracker.sql new file mode 100644 index 0000000000..48d182af61 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2020.01.08__DS-626-statistics-tracker.sql @@ -0,0 +1,29 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +-- =============================================================== +-- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING +-- +-- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED +-- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. +-- http://flywaydb.org/ +-- =============================================================== + +------------------------------------------------------------- +-- This will create the setup for the IRUS statistics harvester +------------------------------------------------------------- + +CREATE SEQUENCE openurltracker_seq; + +CREATE TABLE openurltracker +( + tracker_id INTEGER, + tracker_url VARCHAR(1000), + uploaddate DATE, + CONSTRAINT openurltracker_PK PRIMARY KEY (tracker_id) +); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql new file mode 100644 index 0000000000..a108fd74b4 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql @@ -0,0 +1,29 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +-- =============================================================== +-- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING +-- +-- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED +-- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
+-- http://flywaydb.org/ +-- =============================================================== + +------------------------------------------------------------- +-- This will create the setup for the IRUS statistics harvester +------------------------------------------------------------- + +CREATE SEQUENCE openurltracker_seq; + +CREATE TABLE openurltracker +( + tracker_id NUMBER, + tracker_url VARCHAR2(1000), + uploaddate DATE, + CONSTRAINT openurltracker_PK PRIMARY KEY (tracker_id) +); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.0_2020.01.08__DS-626-statistics-tracker.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.0_2020.01.08__DS-626-statistics-tracker.sql new file mode 100644 index 0000000000..48d182af61 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.0_2020.01.08__DS-626-statistics-tracker.sql @@ -0,0 +1,29 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +-- =============================================================== +-- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING +-- +-- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED +-- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. +-- http://flywaydb.org/ +-- =============================================================== + +------------------------------------------------------------- +-- This will create the setup for the IRUS statistics harvester +------------------------------------------------------------- + +CREATE SEQUENCE openurltracker_seq; + +CREATE TABLE openurltracker +( + tracker_id INTEGER, + tracker_url VARCHAR(1000), + uploaddate DATE, + CONSTRAINT openurltracker_PK PRIMARY KEY (tracker_id) +); \ No newline at end of file diff --git a/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml index bbdf085619..0046366f2e 100644 --- a/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml +++ b/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml @@ -19,11 +19,6 @@ - - - - - - - - - + + + + + + + + + + + - + + + + + + + + + + + + + + xml + + + + + + + + + + ris + + + + + + + + bib + bibtex + + + + + + + + + + csv + + + + + + + + + + + + tsv + + + + + + + + + enl + enw + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/assetstore/curate.txt b/dspace-api/src/test/data/dspaceFolder/assetstore/curate.txt new file mode 100644 index 0000000000..ff2cb89ef6 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/assetstore/curate.txt @@ -0,0 +1,2 @@ +checklinks +requiredmetadata diff --git a/dspace-api/src/test/data/dspaceFolder/assetstore/testImport.csv b/dspace-api/src/test/data/dspaceFolder/assetstore/testImport.csv new file mode 100644 index 0000000000..cb658de4ed --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/assetstore/testImport.csv @@ -0,0 +1,2 @@ +id,collection,dc.contributor.author ++,"123456789/2","Donald, SmithImported" diff --git a/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml b/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml index de19ef7287..cd53a5c1c6 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml +++ 
b/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml @@ -18,6 +18,7 @@ + @@ -82,9 +83,9 @@ - + submit.progressbar.CClicense + org.dspace.app.rest.submit.step.CCLicenseStep + cclicense @@ -145,10 +152,14 @@ - + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/local.cfg b/dspace-api/src/test/data/dspaceFolder/config/local.cfg index 3c4b4a839d..5f32bd0919 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/local.cfg +++ b/dspace-api/src/test/data/dspaceFolder/config/local.cfg @@ -108,3 +108,24 @@ plugin.sequence.java.util.Collection = \ java.util.LinkedList, \ java.util.Stack, \ java.util.TreeSet + +# Enable a test authority control on dc.language.iso field +choices.plugin.dc.language.iso = common_iso_languages +choices.presentation.dc.language.iso = select +authority.controlled.dc.language.iso = true + +########################################### +# PROPERTIES USED TO TEST CONFIGURATION # +# PROPERTY EXPOSURE VIA REST # +########################################### +rest.properties.exposed = configuration.exposed.single.value +rest.properties.exposed = configuration.exposed.array.value +rest.properties.exposed = configuration.not.existing + +configuration.not.exposed = secret_value +configuration.exposed.single.value = public_value +configuration.exposed.array.value = public_value_1, public_value_2 + +# Test config for the authentication ip functionality +authentication-ip.Staff = 5.5.5.5 +authentication-ip.Student = 6.6.6.6 diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/event-service-listeners.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/event-service-listeners.xml new file mode 100644 index 0000000000..15de9735d7 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/event-service-listeners.xml @@ -0,0 +1,14 @@ + + + + + + + + + \ No newline at end of file diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/openurltracker.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/openurltracker.xml new file mode 100644 index 0000000000..1d3be040a3 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/openurltracker.xml @@ -0,0 +1,13 @@ + + + + + + + + + + \ No newline at end of file diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml index b28d45ec18..c614a3158d 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml @@ -4,13 +4,42 @@ xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.5.xsd"> - - + + + + + - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/solr-services.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/solr-services.xml index 5ad031b688..80d45bdd58 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/solr-services.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/solr-services.xml @@ -19,19 +19,29 @@ - + - + - - + + - + - + - - + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/workflow-actions.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/workflow-actions.xml index 7381972961..318d1ad3d7 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/workflow-actions.xml +++ 
b/dspace-api/src/test/data/dspaceFolder/config/spring/api/workflow-actions.xml @@ -13,7 +13,7 @@ - + diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/workflow.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/workflow.xml index 47f22c5d88..97dd957474 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/workflow.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/workflow.xml @@ -43,7 +43,7 @@ - + - + - + - + - + @@ -140,7 +140,7 @@ - + @@ -159,7 +159,7 @@ - + - + diff --git a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml index 6ddfef9b83..14e2affacb 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml @@ -237,7 +237,7 @@ it, please enter the types and the actual numbers or codes.

    - isVolumeOfJournal + isJournalOfVolume periodical creativework.publisher:somepublishername @@ -282,6 +282,70 @@ it, please enter the types and the actual numbers or codes.
    +
    + + + dc + contributor + author + + name + false + You must enter at least the author. + Enter the names of the authors of this item in the form Lastname, Firstname [i.e. Smith, Josh or Smith, J]. + + + + + person + affiliation + name + + onebox + false + + Enter the affiliation of the author as stated on the publication. + + +
    + +
    + + + dc + contributor + author + true + + onebox + Author field that can be associated with an authority providing suggestion + + + + + + dc + contributor + editor + false + + name + Editor field that can be associated with an authority providing the special name lookup + + + + + + dc + subject + true + + onebox + Subject field that can be associated with an authority providing lookup + + + +
    diff --git a/dspace-api/src/test/data/dspaceFolder/config/submission-forms_it.xml b/dspace-api/src/test/data/dspaceFolder/config/submission-forms_it.xml new file mode 100644 index 0000000000..66ed4a926c --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/submission-forms_it.xml @@ -0,0 +1,169 @@ + + + + + + + + + + + + + + + + + + + + + + + +
    + + + dc + title + + false + + onebox + Inserisci nome del file + È necessario inserire un titolo principale per questo item + + + + + dc + description + true + + textarea + Inserisci descrizione per questo file + + + +
    + +
    + + + isAuthorOfPublication + person + true + + Aggiungi un autore + + dc + contributor + author + name + + È richiesto almeno un autore + + + + + dc + title + + false + + onebox + Inserisci titolo principale di questo item + È necessario inserire un titolo principale per questo item + + + + + + + + dc + language + iso + false + + dropdown + Selezionare la lingua del contenuto principale dell'item. Se la lingua non compare nell'elenco, selezionare (Altro). Se il contenuto non ha davvero una lingua (ad esempio, se è un set di dati o un'immagine) selezionare (N/A). + + + + +
    +
    + + + + + + + + + + + + + + + + + + + + + N/A + + + + Inglese (USA) + en_US + + + Inglese + en + + + Spagnolo + es + + + Tedesco + de + + + Francese + fr + + + Italiano + it + + + Giapponese + ja + + + Cinese + zh + + + Portogallo + pt + + + Ucraino + uk + + + (Altro) + other + + + + +
    \ No newline at end of file diff --git a/dspace-api/src/test/data/dspaceFolder/config/submission-forms_uk.xml b/dspace-api/src/test/data/dspaceFolder/config/submission-forms_uk.xml new file mode 100644 index 0000000000..49a2ccc1a9 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/submission-forms_uk.xml @@ -0,0 +1,166 @@ + + + + + + + + + + + + + + + + + + + + + + + +
    + + + dc + title + + false + + onebox + Ввести основний заголовок файла. + Заговолок файла обов'язковий ! + + + + + dc + description + true + + textarea + Ввести опис для цього файла + + + +
    + +
    + + + isAuthorOfPublication + person + true + + Додати автора + + dc + contributor + author + name + + Потрібно ввести хочаб одного автора! + + + + + dc + title + + false + + onebox + Ввести основний заголовок файла + Заговолок файла обов'язковий ! + + + + + + + dc + language + iso + false + + dropdown + Виберiть мову головного змiсту файлу, як що мови немає у списку, вибрати (Iнша). Як що вмiст вайлу не є текстовим, наприклад є фотографiєю, тодi вибрати (N/A) + + + +
    +
    + + + + + + + + + + + + + + + + + + + + N/A + + + + Американська (USA) + en_US + + + Англiйська + en + + + Iспанська + es + + + Нiмецька + de + + + Французька + fr + + + Iталiйська + it + + + Японська + ja + + + Китайська + zh + + + Португальська + pt + + + Турецька + tr + + + (Iнша) + other + + + + +
    \ No newline at end of file diff --git a/dspace-api/src/test/data/solr/solr.xml b/dspace-api/src/test/data/solr/solr.xml new file mode 100644 index 0000000000..8f3644098a --- /dev/null +++ b/dspace-api/src/test/data/solr/solr.xml @@ -0,0 +1,3 @@ + + + diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractDSpaceIntegrationTest.java b/dspace-api/src/test/java/org/dspace/AbstractDSpaceIntegrationTest.java similarity index 97% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractDSpaceIntegrationTest.java rename to dspace-api/src/test/java/org/dspace/AbstractDSpaceIntegrationTest.java index e3bb0a0500..1abc4e017d 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractDSpaceIntegrationTest.java +++ b/dspace-api/src/test/java/org/dspace/AbstractDSpaceIntegrationTest.java @@ -5,7 +5,7 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.test; +package org.dspace; import static org.junit.Assert.fail; @@ -17,7 +17,7 @@ import java.util.TimeZone; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.dspace.app.rest.builder.AbstractBuilder; +import org.dspace.builder.AbstractBuilder; import org.dspace.servicemanager.DSpaceKernelImpl; import org.dspace.servicemanager.DSpaceKernelInit; import org.junit.AfterClass; @@ -90,8 +90,9 @@ public class AbstractDSpaceIntegrationTest { } /** - * This method will be run after all tests finish as per @AfterClass. It + * This method will be run after all tests finish as per @AfterClass. It * will clean resources initialized by the @BeforeClass methods. + * @throws java.sql.SQLException */ @AfterClass public static void destroyTestEnvironment() throws SQLException { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractIntegrationTestWithDatabase.java b/dspace-api/src/test/java/org/dspace/AbstractIntegrationTestWithDatabase.java similarity index 90% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractIntegrationTestWithDatabase.java rename to dspace-api/src/test/java/org/dspace/AbstractIntegrationTestWithDatabase.java index 2e6fbceafb..8ad492c499 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractIntegrationTestWithDatabase.java +++ b/dspace-api/src/test/java/org/dspace/AbstractIntegrationTestWithDatabase.java @@ -5,7 +5,7 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.test; +package org.dspace; import static org.junit.Assert.fail; @@ -14,20 +14,22 @@ import java.sql.SQLException; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.app.launcher.ScriptLauncher; -import org.dspace.app.rest.builder.AbstractBuilder; import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.authority.MockAuthoritySolrServiceImpl; import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.AbstractBuilder; import org.dspace.content.Community; import org.dspace.core.Context; import org.dspace.core.I18nUtil; import org.dspace.discovery.MockSolrSearchCore; -import org.dspace.discovery.SolrSearchCore; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.GroupService; +import org.dspace.kernel.ServiceManager; import org.dspace.services.factory.DSpaceServicesFactory; +import 
org.dspace.statistics.MockSolrLoggerServiceImpl; import org.dspace.storage.rdbms.DatabaseUtils; import org.jdom.Document; import org.junit.After; @@ -81,11 +83,6 @@ public class AbstractIntegrationTestWithDatabase extends AbstractDSpaceIntegrati */ @BeforeClass public static void initDatabase() { - // Clear our old flyway object. Because this DB is in-memory, its - // data is lost when the last connection is closed. So, we need - // to (re)start Flyway from scratch for each Unit Test class. - DatabaseUtils.clearFlywayDBCache(); - try { // Update/Initialize the database to latest version (via Flyway) DatabaseUtils.updateDatabase(); @@ -178,15 +175,25 @@ public class AbstractIntegrationTestWithDatabase extends AbstractDSpaceIntegrati parentCommunity = null; cleanupContext(); + ServiceManager serviceManager = DSpaceServicesFactory.getInstance().getServiceManager(); // Clear the search core. - MockSolrSearchCore searchService = DSpaceServicesFactory.getInstance() - .getServiceManager() - .getServiceByName(SolrSearchCore.class.getName(), MockSolrSearchCore.class); + MockSolrSearchCore searchService = serviceManager + .getServiceByName(null, MockSolrSearchCore.class); searchService.reset(); + MockSolrLoggerServiceImpl statisticsService = serviceManager + .getServiceByName(null, MockSolrLoggerServiceImpl.class); + statisticsService.reset(); + + MockAuthoritySolrServiceImpl authorityService = serviceManager + .getServiceByName(null, MockAuthoritySolrServiceImpl.class); + authorityService.reset(); + // Reload our ConfigurationService (to reset configs to defaults again) DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); + AbstractBuilder.cleanupBuilderCache(); + // NOTE: we explicitly do NOT destroy our default eperson & admin as they // are cached and reused for all tests. This speeds up all tests. } catch (Exception e) { @@ -197,6 +204,7 @@ public class AbstractIntegrationTestWithDatabase extends AbstractDSpaceIntegrati /** * Utility method to cleanup a created Context object (to save memory). * This can also be used by individual tests to cleanup context objects they create. + * @throws java.sql.SQLException passed through. */ protected void cleanupContext() throws SQLException { // If context still valid, flush all database changes and close it @@ -241,4 +249,4 @@ public class AbstractIntegrationTestWithDatabase extends AbstractDSpaceIntegrati } } } -} \ No newline at end of file +} diff --git a/dspace-api/src/test/java/org/dspace/AbstractUnitTest.java b/dspace-api/src/test/java/org/dspace/AbstractUnitTest.java index cd3669b143..d91240d218 100644 --- a/dspace-api/src/test/java/org/dspace/AbstractUnitTest.java +++ b/dspace-api/src/test/java/org/dspace/AbstractUnitTest.java @@ -75,11 +75,6 @@ public class AbstractUnitTest extends AbstractDSpaceTest { */ @BeforeClass public static void initDatabase() { - // Clear our old flyway object. Because this DB is in-memory, its - // data is lost when the last connection is closed. So, we need - // to (re)start Flyway from scratch for each Unit Test class. 
- DatabaseUtils.clearFlywayDBCache(); - try { // Update/Initialize the database to latest version (via Flyway) DatabaseUtils.updateDatabase(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/ExitException.java b/dspace-api/src/test/java/org/dspace/ExitException.java similarity index 93% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/test/ExitException.java rename to dspace-api/src/test/java/org/dspace/ExitException.java index a377d42238..3e7ce2fdc2 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/ExitException.java +++ b/dspace-api/src/test/java/org/dspace/ExitException.java @@ -5,7 +5,7 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.test; +package org.dspace; public class ExitException extends SecurityException { private final int status; diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/NoExitSecurityManager.java b/dspace-api/src/test/java/org/dspace/NoExitSecurityManager.java similarity index 95% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/test/NoExitSecurityManager.java rename to dspace-api/src/test/java/org/dspace/NoExitSecurityManager.java index 79d75dcaf1..7d98f688ef 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/NoExitSecurityManager.java +++ b/dspace-api/src/test/java/org/dspace/NoExitSecurityManager.java @@ -5,7 +5,7 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.test; +package org.dspace; import java.security.Permission; diff --git a/dspace-api/src/test/java/org/dspace/app/bulkedit/DSpaceCSVTest.java b/dspace-api/src/test/java/org/dspace/app/bulkedit/DSpaceCSVTest.java index 1ddba1a011..9cb664fb78 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkedit/DSpaceCSVTest.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkedit/DSpaceCSVTest.java @@ -18,6 +18,7 @@ import java.io.OutputStreamWriter; import java.util.ArrayList; import java.util.List; +import org.apache.commons.io.FileUtils; import org.apache.logging.log4j.Logger; import org.dspace.AbstractUnitTest; import org.junit.Test; @@ -67,7 +68,7 @@ public class DSpaceCSVTest extends AbstractUnitTest { out = null; // Test the CSV parsing was OK - DSpaceCSV dcsv = new DSpaceCSV(new File(filename), context); + DSpaceCSV dcsv = new DSpaceCSV(FileUtils.openInputStream(new File(filename)), context); String[] lines = dcsv.getCSVLinesAsStringArray(); assertThat("testDSpaceCSV Good CSV", lines.length, equalTo(8)); @@ -96,7 +97,7 @@ public class DSpaceCSVTest extends AbstractUnitTest { // Test the CSV parsing was OK try { - dcsv = new DSpaceCSV(new File(filename), context); + dcsv = new DSpaceCSV(FileUtils.openInputStream(new File(filename)), context); lines = dcsv.getCSVLinesAsStringArray(); fail("An exception should have been thrown due to bad CSV"); @@ -124,7 +125,7 @@ public class DSpaceCSVTest extends AbstractUnitTest { // Test the CSV parsing was OK try { - dcsv = new DSpaceCSV(new File(filename), context); + dcsv = new DSpaceCSV(FileUtils.openInputStream(new File(filename)), context); lines = dcsv.getCSVLinesAsStringArray(); fail("An exception should have been thrown due to bad CSV"); diff --git a/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportIT.java b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportIT.java new file mode 100644 index 0000000000..d7379351e5 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportIT.java @@ -0,0 +1,103 @@ +/** + * The contents of this file are subject 
to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkedit; + +import static junit.framework.TestCase.assertTrue; + +import java.io.File; +import java.io.FileInputStream; +import java.nio.charset.StandardCharsets; + +import org.apache.commons.cli.ParseException; +import org.apache.commons.io.IOUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.launcher.ScriptLauncher; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.dspace.scripts.factory.ScriptServiceFactory; +import org.dspace.scripts.service.ScriptService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +public class MetadataExportIT + extends AbstractIntegrationTestWithDatabase { + + @Rule + public ExpectedException thrown = ExpectedException.none(); + + private final ConfigurationService configurationService + = DSpaceServicesFactory.getInstance().getConfigurationService(); + + @Test + public void metadataExportToCsvTest() throws Exception { + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .build(); + Item item = ItemBuilder.createItem(context, collection) + .withAuthor("Donald, Smith") + .build(); + context.restoreAuthSystemState(); + String fileLocation = configurationService.getProperty("dspace.dir") + + testProps.get("test.exportcsv").toString(); + + String[] args = new String[] {"metadata-export", + "-i", String.valueOf(item.getHandle()), + "-f", fileLocation}; + TestDSpaceRunnableHandler testDSpaceRunnableHandler + = new TestDSpaceRunnableHandler(); + + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), + testDSpaceRunnableHandler, kernelImpl); + File file = new File(fileLocation); + String fileContent = IOUtils.toString(new FileInputStream(file), StandardCharsets.UTF_8); + assertTrue(fileContent.contains("Donald, Smith")); + assertTrue(fileContent.contains(String.valueOf(item.getID()))); + } + + @Test(expected = ParseException.class) + public void metadataExportWithoutFileParameter() + throws IllegalAccessException, InstantiationException, ParseException { + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .build(); + Item item = ItemBuilder.createItem(context, collection) + .withAuthor("Donald, Smith") + .build(); + context.restoreAuthSystemState(); + + String[] args = new String[] {"metadata-export", + "-i", String.valueOf(item.getHandle())}; + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService(); + ScriptConfiguration scriptConfiguration = 
scriptService.getScriptConfiguration(args[0]); + + DSpaceRunnable script = null; + if (scriptConfiguration != null) { + script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); + } + if (script != null) { + script.initialize(args, testDSpaceRunnableHandler, null); + script.run(); + } + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataImportTest.java b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataImportTest.java new file mode 100644 index 0000000000..4a0043586b --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataImportTest.java @@ -0,0 +1,92 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkedit; + +import static junit.framework.TestCase.assertEquals; +import static junit.framework.TestCase.assertTrue; + +import java.io.File; + +import org.apache.commons.cli.ParseException; +import org.apache.commons.lang3.StringUtils; +import org.dspace.AbstractIntegrationTest; +import org.dspace.app.launcher.ScriptLauncher; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.ItemService; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.dspace.scripts.factory.ScriptServiceFactory; +import org.dspace.scripts.service.ScriptService; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +public class MetadataImportTest extends AbstractIntegrationTest { + + private final ItemService itemService + = ContentServiceFactory.getInstance().getItemService(); + private final CollectionService collectionService + = ContentServiceFactory.getInstance().getCollectionService(); + private final CommunityService communityService + = ContentServiceFactory.getInstance().getCommunityService(); + + @Rule + public ExpectedException thrown = ExpectedException.none(); + + @Test + public void metadataImportTest() throws Exception { + context.turnOffAuthorisationSystem(); + Community community = communityService.create(null, context); + Collection collection = collectionService.create(context, community); + context.restoreAuthSystemState(); + + String fileLocation = new File(testProps.get("test.importcsv").toString()).getAbsolutePath(); + String[] args = new String[] {"metadata-import", "-f", fileLocation, "-e", eperson.getEmail(), "-s"}; + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + Item importedItem = itemService.findAll(context).next(); + assertTrue( + StringUtils.equals( + itemService.getMetadata(importedItem, "dc", "contributor", "author", Item.ANY).get(0).getValue(), + "Donald, SmithImported")); + assertEquals(importedItem.getSubmitter(), eperson); + + context.turnOffAuthorisationSystem(); + itemService.delete(context, itemService.find(context, importedItem.getID())); + collectionService.delete(context, collectionService.find(context, 
collection.getID())); + communityService.delete(context, communityService.find(context, community.getID())); + context.restoreAuthSystemState(); + } + + @Test(expected = ParseException.class) + public void metadataImportWithoutEPersonParameterTest() + throws IllegalAccessException, InstantiationException, ParseException { + String fileLocation = new File(testProps.get("test.importcsv").toString()).getAbsolutePath(); + String[] args = new String[] {"metadata-import", "-f", fileLocation, "-s"}; + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService(); + ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(args[0]); + + DSpaceRunnable script = null; + if (scriptConfiguration != null) { + script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); + } + if (script != null) { + script.initialize(args, testDSpaceRunnableHandler, null); + script.run(); + } + } +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/csv/CSVMetadataImportReferenceIT.java b/dspace-api/src/test/java/org/dspace/app/csv/CSVMetadataImportReferenceIT.java similarity index 50% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/csv/CSVMetadataImportReferenceIT.java rename to dspace-api/src/test/java/org/dspace/app/csv/CSVMetadataImportReferenceIT.java index baad6f0904..2dfe3a781f 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/csv/CSVMetadataImportReferenceIT.java +++ b/dspace-api/src/test/java/org/dspace/app/csv/CSVMetadataImportReferenceIT.java @@ -5,7 +5,7 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.csv; +package org.dspace.app.csv; import static junit.framework.TestCase.assertEquals; @@ -19,11 +19,18 @@ import java.util.Iterator; import java.util.List; import java.util.UUID; -import org.dspace.app.rest.builder.CollectionBuilder; -import org.dspace.app.rest.builder.CommunityBuilder; -import org.dspace.app.rest.builder.ItemBuilder; -import org.dspace.app.rest.test.AbstractEntityIntegrationTest; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.bulkedit.MetadataImportException; +import org.dspace.app.bulkedit.MetadataImportInvalidHeadingException; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EntityTypeBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.RelationshipTypeBuilder; import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.EntityType; import org.dspace.content.Item; import org.dspace.content.MetadataField; import org.dspace.content.MetadataValue; @@ -33,35 +40,57 @@ import org.dspace.content.service.ItemService; import org.dspace.content.service.MetadataFieldService; import org.dspace.content.service.MetadataValueService; import org.dspace.content.service.RelationshipService; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.dspace.scripts.factory.ScriptServiceFactory; +import org.dspace.scripts.service.ScriptService; import org.junit.Before; import org.junit.Test; -import org.springframework.beans.factory.annotation.Autowired; /** * Created by: Andrew Wood * Date: 26 Jul 2019 */ -public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest { 
+public class CSVMetadataImportReferenceIT extends AbstractIntegrationTestWithDatabase { //Common collection to utilize for test private Collection col1; - @Autowired - private RelationshipService relationshipService; + private RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); + private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); - @Autowired - private ItemService itemService; + + Community parentCommunity; /** * Setup testing enviorment */ @Before - public void setup() { + public void setup() throws SQLException { context.turnOffAuthorisationSystem(); parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); + .withName("Parent Community") + .build(); + col1 = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1").build(); + + + context.turnOffAuthorisationSystem(); + + EntityType publication = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); + EntityType person = EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + EntityType project = EntityTypeBuilder.createEntityTypeBuilder(context, "Project").build(); + EntityType orgUnit = EntityTypeBuilder.createEntityTypeBuilder(context, "OrgUnit").build(); + + RelationshipTypeBuilder + .createRelationshipTypeBuilder(context, publication, person, "isAuthorOfPublication", + "isPublicationOfAuthor", 0, null, 0, + null).withCopyToLeft(false).withCopyToRight(true).build(); + + RelationshipTypeBuilder.createRelationshipTypeBuilder(context, publication, project, "isProjectOfPublication", + "isPublicationOfProject", 0, null, 0, + null).withCopyToRight(true).build(); + context.restoreAuthSystemState(); } @@ -80,15 +109,15 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest int foundCount = 0; for (Relationship rel : rels) { if (rel.getRightItem().getID().equals(rightItem.getID()) - && rel.getLeftItem().getID().equals(leftItem.getID())) { + && rel.getLeftItem().getID().equals(leftItem.getID())) { foundCount++; relationship = rel; } } if (placeDirection.equalsIgnoreCase("left")) { - assertEquals(placeCount, relationship.getLeftPlace()); + assertEquals(relationship.getLeftPlace(), placeCount); } else { - assertEquals(placeCount, relationship.getRightPlace()); + assertEquals(relationship.getRightPlace(), placeCount); } assertEquals(expectedCount, foundCount); } @@ -100,8 +129,8 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest @Test public void testSingleMdRef() throws Exception { String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other", - "+,Person,," + col1.getHandle() + ",0", - "+,Publication,dc.identifier.other:0," + col1.getHandle() + ",1"}; + "+,Person,," + col1.getHandle() + ",0", + "+,Publication,dc.identifier.other:0," + col1.getHandle() + ",1"}; Item[] items = runImport(csv); assertRelationship(items[1], items[0], 1, "left", 0); } @@ -117,7 +146,7 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest performImportScript(csvLines, false); Item[] items = new Item[csvLines.length - 1]; for (int i = 0; i < items.length; i++) { - items[i] = itemService.findByIdOrLegacyId(context, getUUIDByIdentifierOther("" + i).toString()); + items[i] = itemService.findByIdOrLegacyId(context, getUUIDByIdentifierOther("" + i).toString()); } return items; } @@ -129,9 +158,9 @@ public class CSVMetadataImportReferenceIT extends 
AbstractEntityIntegrationTest @Test public void testSingleRowNameRef() throws Exception { String[] csv = {"id,dc.title,relationship.type,relation.isAuthorOfPublication,collection,rowName," + - "dc.identifier.other", - "+,Test Item 1,Person,," + col1.getHandle() + ",idVal,0", - "+,Test Item 2,Publication,rowName:idVal," + col1.getHandle() + ",anything,1"}; + "dc.identifier.other", + "+,Test Item 1,Person,," + col1.getHandle() + ",idVal,0", + "+,Test Item 2,Publication,rowName:idVal," + col1.getHandle() + ",anything,1"}; Item[] items = runImport(csv); assertRelationship(items[1], items[0], 1, "left", 0); } @@ -143,9 +172,9 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest @Test public void testMultiMdRef() throws Exception { String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other", - "+,Person,," + col1.getHandle() + ",0", - "+,Person,," + col1.getHandle() + ",1", - "+,Publication,dc.identifier.other:0||dc.identifier.other:1," + col1.getHandle() + ",2"}; + "+,Person,," + col1.getHandle() + ",0", + "+,Person,," + col1.getHandle() + ",1", + "+,Publication,dc.identifier.other:0||dc.identifier.other:1," + col1.getHandle() + ",2"}; Item[] items = runImport(csv); assertRelationship(items[2], items[0], 1, "left", 0); assertRelationship(items[2], items[1], 1, "left", 1); @@ -158,9 +187,9 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest @Test public void testMultiRowNameRef() throws Exception { String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other,rowName", - "+,Person,," + col1.getHandle() + ",0,val1", - "+,Person,," + col1.getHandle() + ",1,val2", - "+,Publication,rowName:val1||rowName:val2," + col1.getHandle() + ",2,val3"}; + "+,Person,," + col1.getHandle() + ",0,val1", + "+,Person,," + col1.getHandle() + ",1,val2", + "+,Publication,rowName:val1||rowName:val2," + col1.getHandle() + ",2,val3"}; Item[] items = runImport(csv); assertRelationship(items[2], items[0], 1, "left", 0); assertRelationship(items[2], items[1], 1, "left", 1); @@ -174,11 +203,16 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest public void testSingleUUIDReference() throws Exception { context.turnOffAuthorisationSystem(); Item person = ItemBuilder.createItem(context, col1) - .withRelationshipType("Person") - .build(); + .withTitle("Author1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald") + .withPersonIdentifierLastName("Smith") + .withPersonIdentifierFirstName("Donald") + .withRelationshipType("Person") + .build(); context.restoreAuthSystemState(); String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,rowName,dc.identifier.other", - "+,Publication," + person.getID().toString() + "," + col1.getHandle() + ",anything,0"}; + "+,Publication," + person.getID().toString() + "," + col1.getHandle() + ",anything,0"}; Item[] items = runImport(csv); assertRelationship(items[0], person, 1, "left", 0); } @@ -191,15 +225,24 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest public void testMultiUUIDReference() throws Exception { context.turnOffAuthorisationSystem(); Item person = ItemBuilder.createItem(context, col1) - .withRelationshipType("Person") - .build(); + .withTitle("Author1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald") + .withPersonIdentifierLastName("Smith") + .withPersonIdentifierFirstName("Donald") + .withRelationshipType("Person") + .build(); Item 
person2 = ItemBuilder.createItem(context, col1) - .withRelationshipType("Person") - .build(); - context.restoreAuthSystemState(); + .withTitle("Author2") + .withIssueDate("2017-10-17") + .withAuthor("Smith, John") + .withPersonIdentifierLastName("Smith") + .withPersonIdentifierFirstName("John") + .withRelationshipType("Person") + .build(); String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,rowName,dc.identifier.other", - "+,Publication," + person.getID().toString() + "||" + person2.getID().toString() + "," + - col1.getHandle() + ",anything,0"}; + "+,Publication," + person.getID().toString() + "||" + person2.getID().toString() + "," + + col1.getHandle() + ",anything,0"}; Item[] items = runImport(csv); assertRelationship(items[0], person, 1, "left", 0); assertRelationship(items[0], person2, 1, "left", 1); @@ -213,13 +256,17 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest public void testMultiRefArchivedCsv() throws Exception { context.turnOffAuthorisationSystem(); Item person = ItemBuilder.createItem(context, col1) - .withTitle("Person") - .withRelationshipType("Person") - .build(); + .withTitle("Person") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald") + .withPersonIdentifierLastName("Smith") + .withPersonIdentifierFirstName("Donald") + .withRelationshipType("Person") + .build(); String[] csv = {"id,dc.title,relationship.type,relation.isAuthorOfPublication,collection,rowName," + - "dc.identifier.other", - "+,Person2,Person,," + col1.getHandle() + ",idVal,0", - "+,Pub1,Publication,dc.title:Person||dc.title:Person2," + col1.getHandle() + ",anything,1"}; + "dc.identifier.other", + "+,Person2,Person,," + col1.getHandle() + ",idVal,0", + "+,Pub1,Publication,dc.title:Person||dc.title:Person2," + col1.getHandle() + ",anything,1"}; context.restoreAuthSystemState(); Item[] items = runImport(csv); assertRelationship(items[1], person, 1, "left", 0); @@ -235,19 +282,28 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest public void testMultiMixedRefArchivedCsv() throws Exception { context.turnOffAuthorisationSystem(); Item person = ItemBuilder.createItem(context, col1) - .withTitle("Person") - .withRelationshipType("Person") - .build(); + .withTitle("Person") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald") + .withPersonIdentifierLastName("Smith") + .withPersonIdentifierFirstName("Donald") + .withRelationshipType("Person") + .build(); Item person2 = ItemBuilder.createItem(context, col1) - .withTitle("Person2") - .withRelationshipType("Person") - .build(); + .withTitle("Person2") + .withIssueDate("2017-10-17") + .withAuthor("Smith, John") + .withPersonIdentifierLastName("Smith") + .withPersonIdentifierFirstName("John") + .withRelationshipType("Person") + .build(); + context.restoreAuthSystemState(); String[] csv = {"id,dc.title,relationship.type,relation.isAuthorOfPublication,collection,rowName," + - "dc.identifier.other", - "+,Person3,Person,," + col1.getHandle() + ",idVal,0", - "+,Pub1,Publication," + person.getID() + "||dc.title:Person2||rowName:idVal," + - col1.getHandle() + ",anything,1"}; + "dc.identifier.other", + "+,Person3,Person,," + col1.getHandle() + ",idVal,0", + "+,Pub1,Publication," + person.getID() + "||dc.title:Person2||rowName:idVal," + + col1.getHandle() + ",anything,1"}; Item[] items = runImport(csv); assertRelationship(items[1], person, 1, "left", 0); assertRelationship(items[1], person2, 1, "left", 1); @@ -261,9 +317,9 @@ public class CSVMetadataImportReferenceIT 
extends AbstractEntityIntegrationTest @Test public void testRefWithSpecialChar() throws Exception { String[] csv = {"id,dc.title,relationship.type,relation.isAuthorOfPublication,collection,rowName," + - "dc.identifier.other", - "+,Person:,Person,," + col1.getHandle() + ",idVal,0", - "+,Pub1,Publication,dc.title:Person:," + col1.getHandle() + ",anything,1"}; + "dc.identifier.other", + "+,Person:,Person,," + col1.getHandle() + ",idVal,0", + "+,Pub1,Publication,dc.title:Person:," + col1.getHandle() + ",anything,1"}; Item[] items = runImport(csv); assertRelationship(items[1], items[0], 1, "left", 0); } @@ -271,138 +327,158 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest /** * Test failure when referring to item by non unique metadata in the csv file. */ - @Test + @Test(expected = MetadataImportException.class) public void testNonUniqueMDRefInCsv() throws Exception { String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other", - "+,Person,," + col1.getHandle() + ",1", - "+,Person,," + col1.getHandle() + ",1", - "+,Publication,dc.identifier.other:1," + col1.getHandle() + ",2"}; - assertEquals(1, performImportScript(csv, true)); + "+,Person,," + col1.getHandle() + ",1", + "+,Person,," + col1.getHandle() + ",1", + "+,Publication,dc.identifier.other:1," + col1.getHandle() + ",2"}; + performImportScript(csv, true); } /** * Test failure when referring to item by non unique metadata in the csv file. */ - @Test + @Test(expected = MetadataImportException.class) public void testNonUniqueRowName() throws Exception { String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other,rowName", - "+,Person,," + col1.getHandle() + ",1,value", - "+,Person,," + col1.getHandle() + ",1,value", - "+,Publication,rowName:value," + col1.getHandle() + ",2"}; - assertEquals(1, performImportScript(csv, true)); + "+,Person,," + col1.getHandle() + ",1,value", + "+,Person,," + col1.getHandle() + ",1,value", + "+,Publication,rowName:value," + col1.getHandle() + ",2"}; + performImportScript(csv, true); } /** * Test failure when referring to item by non unique metadata in the database. 
*/ - @Test + @Test(expected = MetadataImportException.class) public void testNonUniqueMDRefInDb() throws Exception { context.turnOffAuthorisationSystem(); - ItemBuilder.createItem(context, col1) - .withRelationshipType("Person") - .withIdentifierOther("1") - .build(); - ItemBuilder.createItem(context, col1) - .withRelationshipType("Person") - .withIdentifierOther("1") - .build(); + Item person = ItemBuilder.createItem(context, col1) + .withTitle("Person") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald") + .withPersonIdentifierLastName("Smith") + .withPersonIdentifierFirstName("Donald") + .withRelationshipType("Person") + .withIdentifierOther("1") + .build(); + Item person2 = ItemBuilder.createItem(context, col1) + .withTitle("Person2") + .withIssueDate("2017-10-17") + .withAuthor("Smith, John") + .withPersonIdentifierLastName("Smith") + .withPersonIdentifierFirstName("John") + .withRelationshipType("Person") + .withIdentifierOther("1") + .build(); + context.restoreAuthSystemState(); String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other", - "+,Publication,dc.identifier.other:1," + col1.getHandle() + ",2"}; - assertEquals(1, performImportScript(csv, true)); + "+,Publication,dc.identifier.other:1," + col1.getHandle() + ",2"}; + performImportScript(csv, true); } /** * Test failure when referring to item by non unique metadata in the csv and database. */ - @Test + @Test(expected = MetadataImportException.class) public void testNonUniqueMDRefInBoth() throws Exception { context.turnOffAuthorisationSystem(); - ItemBuilder.createItem(context, col1) - .withRelationshipType("Person") - .withIdentifierOther("1") - .build(); + Item person = ItemBuilder.createItem(context, col1) + .withTitle("Person") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald") + .withPersonIdentifierLastName("Smith") + .withPersonIdentifierFirstName("Donald") + .withRelationshipType("Person") + .withIdentifierOther("1") + .build(); context.restoreAuthSystemState(); String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other", - "+,Person,," + col1.getHandle() + ",1", - "+,Publication,dc.identifier.other:1," + col1.getHandle() + ",2"}; - assertEquals(1, performImportScript(csv, true)); + "+,Person,," + col1.getHandle() + ",1", + "+,Publication,dc.identifier.other:1," + col1.getHandle() + ",2"}; + performImportScript(csv, true); } /** * Test failure when refering to item by metadata that does not exist in the relation column */ - @Test + @Test(expected = Exception.class) public void testNonExistMdRef() throws Exception { String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other", - "+,Person,," + col1.getHandle() + ",1", - "+,Publication,dc.identifier.other:8675309," + col1.getHandle() + ",2"}; - assertEquals(1, performImportScript(csv, false)); + "+,Person,," + col1.getHandle() + ",1", + "+,Publication,dc.identifier.other:8675309," + col1.getHandle() + ",2"}; + performImportScript(csv, false); } /** * Test failure when refering to an item in the CSV that hasn't been created yet due to it's order in the CSV */ - @Test + @Test(expected = Exception.class) public void testCSVImportWrongOrder() throws Exception { String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other", - "+,Publication,dc.identifier.other:8675309," + col1.getHandle() + ",2", - "+,Person,," + col1.getHandle() + ",8675309",}; - assertEquals(1, performImportScript(csv, false)); 
+ "+,Publication,dc.identifier.other:8675309," + col1.getHandle() + ",2", + "+,Person,," + col1.getHandle() + ",8675309",}; + performImportScript(csv, false); } /** * Test failure when refering to an item in the CSV that hasn't been created yet due to it's order in the CSV */ - @Test + @Test(expected = Exception.class) public void testCSVImportWrongOrderRowName() throws Exception { String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other,rowName", - "+,Publication,rowName:row2," + col1.getHandle() + ",2,row1", - "+,Person,," + col1.getHandle() + ",8675309,row2",}; - assertEquals(1, performImportScript(csv, false)); + "+,Publication,rowName:row2," + col1.getHandle() + ",2,row1", + "+,Person,," + col1.getHandle() + ",8675309,row2",}; + performImportScript(csv, false); } /** * Test relationship validation with invalid relationship definition */ - @Test + @Test(expected = MetadataImportException.class) public void testCSVImportInvalidRelationship() throws Exception { String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,rowName", - "+,Publication,," + col1.getHandle() + ",row1", - "+,Unit,rowName:row1," + col1.getHandle() + ",row2",}; - assertEquals(1, performImportScript(csv, true)); + "+,Publication,," + col1.getHandle() + ",row1", + "+,Unit,rowName:row1," + col1.getHandle() + ",row2",}; + performImportScript(csv, true); } /** * Test relationship validation with invalid relationship definition and with an archived origin referer */ - @Test + @Test(expected = MetadataImportInvalidHeadingException.class) public void testInvalidRelationshipArchivedOrigin() throws Exception { context.turnOffAuthorisationSystem(); Item testItem = ItemBuilder.createItem(context, col1) - .withRelationshipType("OrgUnit") - .build(); + .withTitle("OrgUnit") + .withIssueDate("2017-10-17") + .withRelationshipType("OrgUnit") + .build(); context.restoreAuthSystemState(); String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,rowName", - "+,Person,," + col1.getHandle() + ",1" + + "+,Person,," + col1.getHandle() + ",1" + testItem.getID().toString() + ",,rowName:1," + col1.getHandle() + ",2"}; - assertEquals(1, performImportScript(csv, false)); + performImportScript(csv, false); } /** * Test relationship validation with invalid relationship definition and with archived target reference */ - @Test + @Test(expected = MetadataImportInvalidHeadingException.class) public void testInvalidRelationshipArchivedTarget() throws Exception { context.turnOffAuthorisationSystem(); Item testItem = ItemBuilder.createItem(context, col1) - .withRelationshipType("OrgUnit") - .build(); + .withTitle("OrgUnit") + .withIssueDate("2017-10-17") + .withRelationshipType("OrgUnit") + .build(); context.restoreAuthSystemState(); String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,rowName", - testItem.getID().toString() + ",Person,," + col1.getHandle() + ",1" + + testItem.getID().toString() + ",Person,," + col1.getHandle() + ",1" + "+,OrgUnit,rowName:1," + col1.getHandle() + ",2"}; - assertEquals(1, performImportScript(csv, false)); + performImportScript(csv, false); } /** @@ -411,26 +487,42 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest @Test public void testValidRelationshipNoDefinedTypesInCSV() throws Exception { context.turnOffAuthorisationSystem(); - Item testItemOne = ItemBuilder.createItem(context, col1) - .withRelationshipType("Person") - .withIdentifierOther("testItemOne") - .build(); - Item 
testItemTwo = ItemBuilder.createItem(context, col1) - .withRelationshipType("Publication") - .withIdentifierOther("testItemTwo") - .build(); - Item testItemThree = ItemBuilder.createItem(context, col1) - .withRelationshipType("Project") - .withIdentifierOther("testItemThree") - .build(); + + Item testItem = ItemBuilder.createItem(context, col1) + .withTitle("Person") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald") + .withPersonIdentifierLastName("Smith") + .withPersonIdentifierFirstName("Donald") + .withRelationshipType("Person") + .withIdentifierOther("testItemOne") + .build(); + + + Item testItem2 = ItemBuilder.createItem(context, col1) + .withTitle("Publication") + .withIssueDate("2017-10-17") + .withRelationshipType("Publication") + .withIdentifierOther("testItemTwo") + .build(); + + + Item testItem3 = ItemBuilder.createItem(context, col1) + .withTitle("Project") + .withIssueDate("2017-10-17") + .withRelationshipType("Project") + .withIdentifierOther("testItemThree") + .build(); + + context.restoreAuthSystemState(); String[] csv = {"id,relation.isAuthorOfPublication,relation.isPublicationOfProject,collection", - testItemOne.getID().toString() + ",,," + col1.getHandle(), - testItemTwo.getID().toString() + ",dc.identifier.other:testItemOne,," + col1.getHandle(), - testItemThree.getID().toString() + ",,dc.identifier.other:testItemTwo," + col1.getHandle()}; + testItem.getID().toString() + ",,," + col1.getHandle(), + testItem2.getID().toString() + ",dc.identifier.other:testItemOne,," + col1.getHandle(), + testItem3.getID().toString() + ",,dc.identifier.other:testItemTwo," + col1.getHandle()}; performImportScript(csv, false); - assertRelationship(testItemTwo, testItemOne, 1, "left", 0); - assertRelationship(testItemTwo, testItemThree, 1, "left", 0); + assertRelationship(testItem2, testItem, 1, "left", 0); + assertRelationship(testItem2, testItem3, 1, "left", 0); } /** @@ -439,9 +531,9 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest @Test public void testDuplicateRowNameReferences() throws Exception { String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other,rowName", - "+,Person,," + col1.getHandle() + ",0,value", - "+,Publication,rowName:value," + col1.getHandle() + ",1,1", - "+,Publication,rowName:value," + col1.getHandle() + ",2,2"}; + "+,Person,," + col1.getHandle() + ",0,value", + "+,Publication,rowName:value," + col1.getHandle() + ",1,1", + "+,Publication,rowName:value," + col1.getHandle() + ",2,2"}; Item[] items = runImport(csv); assertRelationship(items[1], items[0], 1, "left", 0); assertRelationship(items[2], items[0], 1, "left", 0); @@ -450,18 +542,21 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest /** * Test relationship validation with invalid relationship definition by incorrect typeName usage */ - @Test + @Test(expected = MetadataImportException.class) public void testInvalidTypeNameDefined() throws Exception { context.turnOffAuthorisationSystem(); + Item testItem = ItemBuilder.createItem(context, col1) - .withRelationshipType("Publication") - .build(); + .withTitle("Publication") + .withIssueDate("2017-10-17") + .withRelationshipType("Publication") + .build(); context.restoreAuthSystemState(); String[] csv = {"id,collection,relationship.type,dc.title," + - "relation.isProjectOfPublication,relation.isPublicationOfProject", - "+," + col1.getHandle() + ",Project,Title," + - testItem.getID().toString() + "," + testItem.getID().toString() }; - 
assertEquals(1, performImportScript(csv, true)); + "relation.isProjectOfPublication,relation.isPublicationOfProject", + "+," + col1.getHandle() + ",Project,Title," + + testItem.getID().toString() + "," + testItem.getID().toString()}; + performImportScript(csv, true); } /** @@ -475,17 +570,34 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest } out.flush(); out.close(); + String fileLocation = csvFile.getAbsolutePath(); try { + String[] args = null; if (validateOnly) { - return runDSpaceScript("metadata-import", "-f", csvFile.getAbsolutePath(), "-e", "admin@email.com", - "-s", "-v"); + args = new String[] {"metadata-import", "-f", fileLocation, "-e", eperson.getEmail(), "-s", "-v"}; } else { - return runDSpaceScript("metadata-import", "-f", csvFile.getAbsolutePath(), "-e", "admin@email.com", - "-s"); + args = new String[] {"metadata-import", "-f", fileLocation, "-e", eperson.getEmail(), "-s",}; + } + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService(); + ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(args[0]); + + DSpaceRunnable script = null; + if (scriptConfiguration != null) { + script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); + } + if (script != null) { + script.initialize(args, testDSpaceRunnableHandler, null); + script.run(); + } + if (testDSpaceRunnableHandler.getException() != null) { + throw testDSpaceRunnableHandler.getException(); } } finally { csvFile.delete(); } + return 0; } /** @@ -499,7 +611,7 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest ArrayList uuidList = new ArrayList<>(); MetadataValueService metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService(); MetadataFieldService metadataFieldService = - ContentServiceFactory.getInstance().getMetadataFieldService(); + ContentServiceFactory.getInstance().getMetadataFieldService(); MetadataField mfo = metadataFieldService.findByElement(context, "dc", "identifier", "other"); Iterator mdv = metadataValueService.findByFieldAndValue(context, mfo, value); while (mdv.hasNext()) { diff --git a/dspace-api/src/test/java/org/dspace/app/scripts/handler/impl/TestDSpaceRunnableHandler.java b/dspace-api/src/test/java/org/dspace/app/scripts/handler/impl/TestDSpaceRunnableHandler.java new file mode 100644 index 0000000000..1b5b3fa7ac --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/scripts/handler/impl/TestDSpaceRunnableHandler.java @@ -0,0 +1,36 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.scripts.handler.impl; + +import org.dspace.scripts.handler.impl.CommandLineDSpaceRunnableHandler; + +/** + * This class will be used as a DSpaceRunnableHandler for the Tests so that we can stop the handler + * from calling System.exit() when a script would throw an exception + */ +public class TestDSpaceRunnableHandler extends CommandLineDSpaceRunnableHandler { + + private Exception exception = null; + + /** + * We're overriding this method so that we can stop the script from doing the System.exit() if + * an exception within the script is thrown + */ + @Override + public void handleException(String message, Exception e) { + exception = e; + } + + /** + * 
Generic getter for the exception + * @return the exception value of this TestDSpaceRunnableHandler + */ + public Exception getException() { + return exception; + } +} diff --git a/dspace-api/src/test/java/org/dspace/authenticate/IPMatcherTest.java b/dspace-api/src/test/java/org/dspace/authenticate/IPMatcherTest.java index 511ea0da25..6f73c3abc4 100644 --- a/dspace-api/src/test/java/org/dspace/authenticate/IPMatcherTest.java +++ b/dspace-api/src/test/java/org/dspace/authenticate/IPMatcherTest.java @@ -153,6 +153,14 @@ public class IPMatcherTest { assertFalse(ipMatcher.match("0:0:0:0:0:0:0:1")); } + @Test + public void testIPv6FullMaskMatching() throws Exception { + final IPMatcher ipMatcher = new IPMatcher("::2/128"); + + assertTrue(ipMatcher.match("0:0:0:0:0:0:0:2")); + assertFalse(ipMatcher.match("0:0:0:0:0:0:0:1")); + } + @Test public void testAsteriskMatchingSuccess() throws Exception { diff --git a/dspace-api/src/test/java/org/dspace/authority/MockAuthoritySolrServiceImpl.java b/dspace-api/src/test/java/org/dspace/authority/MockAuthoritySolrServiceImpl.java index e1e018ef33..6c0ad5ace8 100644 --- a/dspace-api/src/test/java/org/dspace/authority/MockAuthoritySolrServiceImpl.java +++ b/dspace-api/src/test/java/org/dspace/authority/MockAuthoritySolrServiceImpl.java @@ -21,4 +21,8 @@ public class MockAuthoritySolrServiceImpl extends AuthoritySolrServiceImpl imple //We don't use SOLR in the tests of this module solr = null; } + + public void reset() { + // This method intentionally left blank. + } } diff --git a/dspace-api/src/test/java/org/dspace/authorize/AuthorizeConfigIntegrationTest.java b/dspace-api/src/test/java/org/dspace/authorize/AuthorizeConfigIT.java similarity index 97% rename from dspace-api/src/test/java/org/dspace/authorize/AuthorizeConfigIntegrationTest.java rename to dspace-api/src/test/java/org/dspace/authorize/AuthorizeConfigIT.java index d338bc6e2c..3218c14d7e 100644 --- a/dspace-api/src/test/java/org/dspace/authorize/AuthorizeConfigIntegrationTest.java +++ b/dspace-api/src/test/java/org/dspace/authorize/AuthorizeConfigIT.java @@ -20,7 +20,7 @@ import org.junit.Test; * @author Andrea Bollini (andrea.bollini at 4science.it) * */ -public class AuthorizeConfigIntegrationTest extends AbstractIntegrationTest { +public class AuthorizeConfigIT extends AbstractIntegrationTest { @Test public void testReloadConfiguration() { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/AbstractBuilder.java b/dspace-api/src/test/java/org/dspace/builder/AbstractBuilder.java similarity index 94% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/AbstractBuilder.java rename to dspace-api/src/test/java/org/dspace/builder/AbstractBuilder.java index 403e623c3c..31c0298824 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/AbstractBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/AbstractBuilder.java @@ -5,18 +5,18 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.builder; +package org.dspace.builder; import java.sql.SQLException; import java.util.List; import org.apache.commons.collections4.CollectionUtils; import org.apache.logging.log4j.Logger; -import org.dspace.app.rest.builder.util.AbstractBuilderCleanupUtil; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.factory.AuthorizeServiceFactory; import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.ResourcePolicyService; +import org.dspace.builder.util.AbstractBuilderCleanupUtil; 
import org.dspace.content.Bitstream; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.BitstreamFormatService; @@ -55,8 +55,8 @@ import org.dspace.xmlworkflow.storedcomponents.service.XmlWorkflowItemService; /** * Abstract builder class that holds references to all available services * - * @param <T> This param represents the Model object for the Builder - * @param <S> This param represents the Service object for the builder + * @param <T> This parameter represents the Model object for the Builder + * @param <S> This parameter represents the Service object for the builder * @author Jonas Van Goolen - (jonas@atmire.com) */ public abstract class AbstractBuilder<T, S> { @@ -96,7 +96,8 @@ public abstract class AbstractBuilder<T, S> { * This static class will make sure that the objects built with the builders are disposed of in a foreign-key * constraint safe manner by predefining an order */ - private static AbstractBuilderCleanupUtil abstractBuilderCleanupUtil = new AbstractBuilderCleanupUtil(); + private static final AbstractBuilderCleanupUtil abstractBuilderCleanupUtil + = new AbstractBuilderCleanupUtil(); /** * log4j category */ @@ -194,6 +195,13 @@ public abstract class AbstractBuilder<T, S> { } } + /** + * This method will cleanup the map of builders + */ + public static void cleanupBuilderCache() { + abstractBuilderCleanupUtil.cleanupMap(); + } + /** * This method will ensure that the DSpaceObject contained within the Builder will be cleaned up properly * @throws Exception If something goes wrong */ @@ -202,7 +210,7 @@ public abstract class AbstractBuilder<T, S> { public abstract T build() throws SQLException, AuthorizeException; - public abstract void delete(T dso) throws Exception; + public abstract void delete(Context c, T dso) throws Exception; protected abstract S getService(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/AbstractCRUDBuilder.java b/dspace-api/src/test/java/org/dspace/builder/AbstractCRUDBuilder.java similarity index 90% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/AbstractCRUDBuilder.java rename to dspace-api/src/test/java/org/dspace/builder/AbstractCRUDBuilder.java index 884bcc9e3c..ff2bef51c2 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/AbstractCRUDBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/AbstractCRUDBuilder.java @@ -5,7 +5,7 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.builder; +package org.dspace.builder; import org.dspace.core.Context; import org.dspace.core.ReloadableEntity; @@ -13,6 +13,8 @@ import org.dspace.service.DSpaceCRUDService; /** * @author Jonas Van Goolen - (jonas@atmire.com) + * + * @param <T> A specific kind of ReloadableEntity.
*/ public abstract class AbstractCRUDBuilder<T extends ReloadableEntity> extends AbstractBuilder<T, DSpaceCRUDService> { @@ -20,8 +22,10 @@ public abstract class AbstractCRUDBuilder extends Ab super(context); } + @Override protected abstract DSpaceCRUDService getService(); + @Override public abstract T build(); public void delete(T dso) throws Exception { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/AbstractDSpaceObjectBuilder.java b/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java similarity index 87% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/AbstractDSpaceObjectBuilder.java rename to dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java index 9fbd0ff0cd..69cfd0e136 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/AbstractDSpaceObjectBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java @@ -5,7 +5,7 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.builder; +package org.dspace.builder; import java.sql.SQLException; import java.util.Date; @@ -43,12 +43,15 @@ public abstract class AbstractDSpaceObjectBuilder this.context = context; } + @Override public abstract void cleanup() throws Exception; + @Override protected abstract DSpaceObjectService getService(); + @Override protected B handleException(final Exception e) { log.error(e.getMessage(), e); return null; @@ -143,6 +146,32 @@ public abstract class AbstractDSpaceObjectBuilder } return (B) this; } + /** + * Support method to grant the {@link Constants#ADMIN} permission over an object + * to a specific eperson. + * + * @param dso + * the DSpaceObject on which to grant the permission + * @param eperson + * the eperson that will be granted the permission + * @param startDate + * the date from which the permission applies + * @return the builder properly configured to build the object with the additional admin permission + */ + protected <B extends AbstractDSpaceObjectBuilder<T>> B setAdminPermission(DSpaceObject dso, EPerson eperson, + Date startDate) { + try { + + ResourcePolicy rp = authorizeService.createOrModifyPolicy(null, context, null, null, + eperson, startDate, Constants.ADMIN, + "Integration Test", dso); + if (rp != null) { + resourcePolicyService.update(context, rp); + } + } catch (Exception e) { + return handleException(e); + } + return (B) this; + + } /** * Support method to grant {@link Constants#REMOVE} permission to a specific eperson @@ -231,19 +260,15 @@ public abstract class AbstractDSpaceObjectBuilder return (B) this; } + @Override public abstract T build() throws SQLException, AuthorizeException; - public void delete(T dso) throws Exception { - - try (Context c = new Context()) { - c.turnOffAuthorisationSystem(); - T attachedDso = c.reloadEntity(dso); - if (attachedDso != null) { - getService().delete(c, attachedDso); - } - c.complete(); + @Override + public void delete(Context c, T dso) throws Exception { + if (dso != null) { + getService().delete(c, dso); } - + c.complete(); indexingService.commit(); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/BitstreamBuilder.java b/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java similarity index 87% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/BitstreamBuilder.java rename to dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java index 3de10b723d..b8942a17d0 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/BitstreamBuilder.java +++
b/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java @@ -5,7 +5,7 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.builder; +package org.dspace.builder; import java.io.IOException; import java.io.InputStream; @@ -98,6 +98,13 @@ public class BitstreamBuilder extends AbstractDSpaceObjectBuilder { return this; } + public BitstreamBuilder withProvenance(String provenance) throws SQLException { + + bitstreamService.addMetadata(context, bitstream, "dc", "description", "provenance", null, provenance); + + return this; + } + private Bundle getOriginalBundle(Item item) throws SQLException, AuthorizeException { List bundles = itemService.getBundles(item, ORIGINAL); Bundle targetBundle = null; @@ -122,6 +129,7 @@ public class BitstreamBuilder extends AbstractDSpaceObjectBuilder { return this; } + @Override public Bitstream build() { try { bitstreamService.update(context, bitstream); @@ -145,9 +153,18 @@ public class BitstreamBuilder extends AbstractDSpaceObjectBuilder { @Override public void cleanup() throws Exception { - delete(bitstream); + try (Context c = new Context()) { + c.turnOffAuthorisationSystem(); + // Ensure object and any related objects are reloaded before checking to see what needs cleanup + bitstream = c.reloadEntity(bitstream); + if (bitstream != null) { + delete(c, bitstream); + c.complete(); + } + } } + @Override protected DSpaceObjectService getService() { return bitstreamService; } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/BitstreamFormatBuilder.java b/dspace-api/src/test/java/org/dspace/builder/BitstreamFormatBuilder.java similarity index 85% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/BitstreamFormatBuilder.java rename to dspace-api/src/test/java/org/dspace/builder/BitstreamFormatBuilder.java index 266f4b153d..1051712326 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/BitstreamFormatBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/BitstreamFormatBuilder.java @@ -5,7 +5,7 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.builder; +package org.dspace.builder; import java.io.IOException; import java.sql.SQLException; @@ -33,7 +33,23 @@ public class BitstreamFormatBuilder extends AbstractCRUDBuilder @Override public void cleanup() throws Exception { - delete(bitstreamFormat); + try (Context c = new Context()) { + c.turnOffAuthorisationSystem(); + // Ensure object and any related objects are reloaded before checking to see what needs cleanup + bitstreamFormat = c.reloadEntity(bitstreamFormat); + if (bitstreamFormat != null) { + delete(c, bitstreamFormat); + } + c.complete(); + indexingService.commit(); + } + } + + @Override + public void delete(Context c, BitstreamFormat dso) throws Exception { + if (dso != null) { + getService().delete(c, dso); + } } @Override @@ -55,7 +71,6 @@ public class BitstreamFormatBuilder extends AbstractCRUDBuilder log.error(e); } catch (AuthorizeException e) { log.error(e); - ; } return bitstreamFormat; } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/BundleBuilder.java b/dspace-api/src/test/java/org/dspace/builder/BundleBuilder.java similarity index 61% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/BundleBuilder.java rename to dspace-api/src/test/java/org/dspace/builder/BundleBuilder.java index bd98e3ced9..614cd54c6d 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/BundleBuilder.java +++ 
b/dspace-api/src/test/java/org/dspace/builder/BundleBuilder.java @@ -5,11 +5,13 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.builder; +package org.dspace.builder; +import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; +import java.util.UUID; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Bitstream; @@ -23,7 +25,7 @@ public class BundleBuilder extends AbstractDSpaceObjectBuilder { private Bundle bundle; private Item item; private String name; - private List bitstreams = new ArrayList<>(); + private final List bitstreams = new ArrayList<>(); protected BundleBuilder(Context context) { super(context); @@ -50,14 +52,25 @@ public class BundleBuilder extends AbstractDSpaceObjectBuilder { return this; } + @Override public void cleanup() throws Exception { - delete(bundle); + try (Context c = new Context()) { + c.turnOffAuthorisationSystem(); + // Ensure object and any related objects are reloaded before checking to see what needs cleanup + bundle = c.reloadEntity(bundle); + if (bundle != null) { + delete(c, bundle); + c.complete(); + } + } } + @Override protected DSpaceObjectService getService() { return bundleService; } + @Override public Bundle build() throws SQLException, AuthorizeException { bundle = bundleService.create(context, item, name); @@ -67,4 +80,20 @@ public class BundleBuilder extends AbstractDSpaceObjectBuilder { return bundle; } + + public static void deleteBundle(UUID uuid) throws SQLException, IOException { + try (Context c = new Context()) { + c.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.find(c, uuid); + if (bundle != null) { + try { + bundleService.delete(c, bundle); + } catch (AuthorizeException e) { + // cannot occur, just wrap it to make the compiler happy + throw new RuntimeException(e); + } + } + c.complete(); + } + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/ClaimedTaskBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ClaimedTaskBuilder.java similarity index 87% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/ClaimedTaskBuilder.java rename to dspace-api/src/test/java/org/dspace/builder/ClaimedTaskBuilder.java index 81e5552f5e..338739285f 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/ClaimedTaskBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ClaimedTaskBuilder.java @@ -5,7 +5,7 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.builder; +package org.dspace.builder; import java.io.InputStream; @@ -110,42 +110,36 @@ public class ClaimedTaskBuilder extends AbstractBuilder { } } + public CollectionBuilder withProvenance(final String provenance) { + return addMetadataValue(collection, + MetadataSchemaEnum.DC.getName(), + "description", + "provenance", + provenance); + } + public CollectionBuilder withTemplateItem() throws SQLException, AuthorizeException { collectionService.createTemplateItem(context, collection); return this; @@ -155,28 +165,80 @@ public class CollectionBuilder extends AbstractDSpaceObjectBuilder { @Override public void cleanup() throws Exception { - deleteWorkflowGroups(collection); - delete(collection); + try (Context c = new Context()) { + c.turnOffAuthorisationSystem(); + // Ensure object and any related objects are reloaded before checking to see what needs cleanup + collection = c.reloadEntity(collection); + if (collection != null) { + deleteAdminGroup(c); + deleteItemTemplate(c); + 
deleteDefaultReadGroups(c, collection); + deleteWorkflowGroups(c, collection); + delete(c ,collection); + c.complete(); + } + } } - public void deleteWorkflowGroups(Collection collection) throws Exception { + private void deleteAdminGroup(Context c) throws SQLException, AuthorizeException, IOException { + Group group = collection.getAdministrators(); + if (group != null) { + collectionService.removeAdministrators(c, collection); + groupService.delete(c, group); + } + } - try (Context c = new Context()) { + private void deleteItemTemplate(Context c) throws SQLException, AuthorizeException, IOException { + if (collection.getTemplateItem() != null) { + collectionService.removeTemplateItem(c, collection); + } + } + + public void deleteWorkflowGroups(Context c, Collection collection) throws Exception { + for (int i = 1; i <= 3; i++) { + Group group = collectionService.getWorkflowGroup(c, collection, i); + if (group != null) { + collectionService.setWorkflowGroup(c, collection, i, null); + groupService.delete(c, group); + } + } + } + + public void deleteDefaultReadGroups(Context c, Collection collection) throws Exception { + Group defaultItemReadGroup = groupService.findByName(c, "COLLECTION_" + + collection.getID().toString() + "_ITEM_DEFAULT_READ"); + Group defaultBitstreamReadGroup = groupService.findByName(c, "COLLECTION_" + + collection.getID().toString() + "_BITSTREAM_DEFAULT_READ"); + if (defaultItemReadGroup != null) { + groupService.delete(c, defaultItemReadGroup); + } + if (defaultBitstreamReadGroup != null) { + groupService.delete(c, defaultBitstreamReadGroup); + } + } + + /** + * Delete the Test Collection referred to by the given UUID + * + * @param uuid UUID of Test Collection to delete + * @throws SQLException + * @throws IOException + * @throws SearchServiceException + */ + public static void deleteCollection(UUID uuid) throws SQLException, IOException, SearchServiceException { + try (Context c = new Context()) { c.turnOffAuthorisationSystem(); - for (int i = 1; i <= 3; i++) { - Group g = collectionService.getWorkflowGroup(c, collection, i); - if (g != null) { - Group attachedDso = c.reloadEntity(g); - if (attachedDso != null) { - collectionService.setWorkflowGroup(c, collection, i, null); - groupService.delete(c, attachedDso); - } + Collection collection = collectionService.find(c, uuid); + if (collection != null) { + try { + collectionService.delete(c, collection); + } catch (AuthorizeException e) { + throw new RuntimeException(e.getMessage(), e); } } c.complete(); - } - - indexingService.commit(); + indexingService.commit(); + } } @Override diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/CommunityBuilder.java b/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java similarity index 82% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/CommunityBuilder.java rename to dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java index 788aa502a6..5500697da4 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/CommunityBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java @@ -5,7 +5,7 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.builder; +package org.dspace.builder; import java.io.IOException; import java.io.InputStream; @@ -115,7 +115,24 @@ public class CommunityBuilder extends AbstractDSpaceObjectBuilder { @Override public void cleanup() throws Exception { - delete(community); + try (Context c = new Context()) { + 
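
deleteDefaultReadGroups() relies on the naming convention DSpace uses for a collection's default read groups. The same convention can be used directly in a test to look those groups up, as in this sketch (groupService and context as in the builder above):

    // Convention used by the cleanup code above:
    // COLLECTION_<uuid>_ITEM_DEFAULT_READ and COLLECTION_<uuid>_BITSTREAM_DEFAULT_READ
    String prefix = "COLLECTION_" + collection.getID().toString();
    Group itemRead = groupService.findByName(context, prefix + "_ITEM_DEFAULT_READ");
    Group bitstreamRead = groupService.findByName(context, prefix + "_BITSTREAM_DEFAULT_READ");
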
c.turnOffAuthorisationSystem(); + // Ensure object and any related objects are reloaded before checking to see what needs cleanup + community = c.reloadEntity(community); + if (community != null) { + deleteAdminGroup(c); + delete(c, community); + c.complete(); + } + } + } + + private void deleteAdminGroup(Context c) throws SQLException, AuthorizeException, IOException { + Group group = community.getAdministrators(); + if (group != null) { + communityService.removeAdministrators(c, community); + groupService.delete(c, group); + } } @Override @@ -135,6 +152,11 @@ public class CommunityBuilder extends AbstractDSpaceObjectBuilder { Community community = communityService.find(c, uuid); if (community != null) { try { + Group adminGroup = community.getAdministrators(); + if (adminGroup != null) { + communityService.removeAdministrators(c, community); + groupService.delete(c, adminGroup); + } communityService.delete(c, community); } catch (AuthorizeException e) { // cannot occur, just wrap it to make the compiler happy diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/EPersonBuilder.java b/dspace-api/src/test/java/org/dspace/builder/EPersonBuilder.java similarity index 79% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/EPersonBuilder.java rename to dspace-api/src/test/java/org/dspace/builder/EPersonBuilder.java index a761099f83..256b3432d4 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/EPersonBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/EPersonBuilder.java @@ -5,12 +5,14 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.builder; +package org.dspace.builder; import java.io.IOException; import java.sql.SQLException; import java.util.UUID; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.content.service.DSpaceObjectService; import org.dspace.core.Context; @@ -19,6 +21,7 @@ import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; public class EPersonBuilder extends AbstractDSpaceObjectBuilder { + private static final Logger LOG = LogManager.getLogger(EPersonBuilder.class); private EPerson ePerson; @@ -28,23 +31,29 @@ public class EPersonBuilder extends AbstractDSpaceObjectBuilder { @Override public void cleanup() throws Exception { - delete(ePerson); + try (Context c = new Context()) { + c.turnOffAuthorisationSystem(); + // Ensure object and any related objects are reloaded before checking to see what needs cleanup + ePerson = c.reloadEntity(ePerson); + if (ePerson != null) { + delete(c, ePerson); + c.complete(); + } + } } + @Override protected DSpaceObjectService getService() { return ePersonService; } + @Override public EPerson build() { try { ePersonService.update(context, ePerson); indexingService.commit(); - } catch (SearchServiceException e) { - e.printStackTrace(); - } catch (SQLException e) { - e.printStackTrace(); - } catch (AuthorizeException e) { - e.printStackTrace(); + } catch (SearchServiceException | SQLException | AuthorizeException e) { + LOG.warn("Failed to complete the EPerson", e); } return ePerson; } @@ -57,10 +66,8 @@ public class EPersonBuilder extends AbstractDSpaceObjectBuilder { private EPersonBuilder create() { try { ePerson = ePersonService.create(context); - } catch (SQLException e) { - e.printStackTrace(); - } catch (AuthorizeException e) { - e.printStackTrace(); + } catch (SQLException | AuthorizeException e) { + LOG.warn("Failed to create 
the EPerson", e); } return this; } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/EntityTypeBuilder.java b/dspace-api/src/test/java/org/dspace/builder/EntityTypeBuilder.java similarity index 77% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/EntityTypeBuilder.java rename to dspace-api/src/test/java/org/dspace/builder/EntityTypeBuilder.java index 36b1e48f85..ae0e807198 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/EntityTypeBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/EntityTypeBuilder.java @@ -5,7 +5,7 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.builder; +package org.dspace.builder; import java.sql.SQLException; @@ -34,9 +34,26 @@ public class EntityTypeBuilder extends AbstractBuilder { @Override public void cleanup() throws Exception { - delete(group); + try (Context c = new Context()) { + c.turnOffAuthorisationSystem(); + // Ensure object and any related objects are reloaded before checking to see what needs cleanup + group = c.reloadEntity(group); + if (group != null) { + delete(c, group); + c.complete(); + } + } } public static GroupBuilder createGroup(final Context context) { @@ -59,6 +67,11 @@ public class GroupBuilder extends AbstractDSpaceObjectBuilder { @Override public Group build() { + try { + groupService.update(context, group); + } catch (Exception e) { + return handleException(e); + } return group; } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/ItemBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java similarity index 71% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/ItemBuilder.java rename to dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java index cb195cf026..27fdf10038 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/ItemBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java @@ -5,8 +5,13 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.builder; +package org.dspace.builder; +import java.io.IOException; +import java.sql.SQLException; +import java.util.UUID; + +import org.dspace.authorize.AuthorizeException; import org.dspace.content.Collection; import org.dspace.content.DCDate; import org.dspace.content.Item; @@ -14,6 +19,7 @@ import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.WorkspaceItem; import org.dspace.content.service.DSpaceObjectService; import org.dspace.core.Context; +import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; /** @@ -84,6 +90,10 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder { return addMetadataValue(item, "relationship", "type", null, relationshipType); } + public ItemBuilder withType(final String type) { + return addMetadataValue(item, "dc", "type", null, type); + } + public ItemBuilder withPublicationIssueNumber(final String issueNumber) { return addMetadataValue(item, "publicationissue", "issueNumber", null, issueNumber); } @@ -92,6 +102,10 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder { return addMetadataValue(item, "publicationvolume", "volumeNumber", null, volumeNumber); } + public ItemBuilder withProvenanceData(final String provenanceData) { + return addMetadataValue(item, MetadataSchemaEnum.DC.getName(), "description", "provenance", provenanceData); + } + public ItemBuilder makeUnDiscoverable() { item.setDiscoverable(false); return this; @@ -117,6 +131,19 @@ public class ItemBuilder 
extends AbstractDSpaceObjectBuilder { return this; } + /** + * Create an admin group for the collection with the specified members + * + * @param members epersons to add to the admin group + * @return this builder + * @throws SQLException + * @throws AuthorizeException + */ + public ItemBuilder withAdminUser(EPerson ePerson) throws SQLException, AuthorizeException { + return setAdminPermission(item, ePerson, null); + } + + @Override public Item build() { try { @@ -143,11 +170,40 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder { @Override public void cleanup() throws Exception { - delete(item); + try (Context c = new Context()) { + c.turnOffAuthorisationSystem(); + // Ensure object and any related objects are reloaded before checking to see what needs cleanup + item = c.reloadEntity(item); + if (item != null) { + delete(c, item); + c.complete(); + } + } } @Override protected DSpaceObjectService getService() { return itemService; } + + /** + * Delete the Test Item referred to by the given UUID + * @param uuid UUID of Test Item to delete + * @throws SQLException + * @throws IOException + */ + public static void deleteItem(UUID uuid) throws SQLException, IOException { + try (Context c = new Context()) { + c.turnOffAuthorisationSystem(); + Item item = itemService.find(c, uuid); + if (item != null) { + try { + itemService.delete(c, item); + } catch (AuthorizeException e) { + throw new RuntimeException(e); + } + } + c.complete(); + } + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/MetadataFieldBuilder.java b/dspace-api/src/test/java/org/dspace/builder/MetadataFieldBuilder.java similarity index 67% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/MetadataFieldBuilder.java rename to dspace-api/src/test/java/org/dspace/builder/MetadataFieldBuilder.java index 1476cf0f24..dfc9112a3f 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/MetadataFieldBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/MetadataFieldBuilder.java @@ -5,7 +5,7 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.builder; +package org.dspace.builder; import java.io.IOException; import java.sql.SQLException; @@ -37,7 +37,23 @@ public class MetadataFieldBuilder extends AbstractBuilder } @Override - public void delete(PoolTask poolTask) throws Exception { - try (Context c = new Context()) { - c.turnOffAuthorisationSystem(); - PoolTask attachedPoolTask = c.reloadEntity(poolTask); - if (attachedPoolTask != null) { - // to delete a pooltask keeping the system in a consistent state you need to delete the underline - // workflowitem - WorkflowItemBuilder.deleteWorkflowItem(attachedPoolTask.getWorkflowItem().getID()); - } - c.complete(); + public void delete(Context c, PoolTask poolTask) throws Exception { + if (poolTask != null) { + // to delete a pooltask keeping the system in a consistent state you need to delete the underline + // workflowitem + WorkflowItemBuilder.deleteWorkflowItem(poolTask.getWorkflowItem().getID()); } } - private void deleteWsi(WorkspaceItem dso) throws Exception { - try (Context c = new Context()) { - c.turnOffAuthorisationSystem(); - WorkspaceItem attachedDso = c.reloadEntity(dso); - if (attachedDso != null) { - workspaceItemService.deleteAll(c, attachedDso); - } - c.complete(); + private void deleteWsi(Context c, WorkspaceItem dso) throws Exception { + if (dso != null) { + workspaceItemService.deleteAll(c, dso); } - - indexingService.commit(); } @Override public void cleanup() 
throws Exception { - if (workspaceItem != null) { - deleteWsi(workspaceItem); - } - if (workflowItem != null) { + try (Context c = new Context()) { + c.turnOffAuthorisationSystem(); + // Ensure object and any related objects are reloaded before checking to see what needs cleanup + workspaceItem = c.reloadEntity(workspaceItem); + if (workspaceItem != null) { + deleteWsi(c, workspaceItem); + } + if (workflowItem != null) { // to delete the pooltask keeping the system in a consistent state you need to delete the underline // workflowitem WorkflowItemBuilder.deleteWorkflowItem(workflowItem.getID()); + } + c.complete(); + indexingService.commit(); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/ProcessBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java similarity index 54% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/ProcessBuilder.java rename to dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java index 9e200b44f2..6970cd57c3 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/ProcessBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java @@ -5,11 +5,15 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.builder; +package org.dspace.builder; +import java.io.IOException; import java.sql.SQLException; +import java.text.ParseException; +import java.text.SimpleDateFormat; import java.util.List; +import org.dspace.authorize.AuthorizeException; import org.dspace.content.ProcessStatus; import org.dspace.core.Context; import org.dspace.eperson.EPerson; @@ -41,10 +45,33 @@ public class ProcessBuilder extends AbstractBuilder { return this; } - public void cleanup() throws Exception { - delete(process); + public ProcessBuilder withProcessStatus(ProcessStatus processStatus) { + process.setProcessStatus(processStatus); + return this; } + public ProcessBuilder withStartAndEndTime(String startTime, String endTime) throws ParseException { + SimpleDateFormat simpleDateFormat = new SimpleDateFormat("dd/MM/yyyy"); + process.setStartTime(simpleDateFormat.parse(startTime)); + process.setFinishedTime(simpleDateFormat.parse(endTime)); + return this; + } + + @Override + public void cleanup() throws Exception { + try (Context c = new Context()) { + c.turnOffAuthorisationSystem(); + // Ensure object and any related objects are reloaded before checking to see what needs cleanup + process = c.reloadEntity(process); + if (process != null) { + delete(c, process); + } + c.complete(); + indexingService.commit(); + } + } + + @Override public Process build() { try { processService.update(context, process); @@ -56,21 +83,31 @@ public class ProcessBuilder extends AbstractBuilder { return process; } - public void delete(Process dso) throws Exception { - try (Context c = new Context()) { - c.turnOffAuthorisationSystem(); - Process attachedDso = c.reloadEntity(dso); - if (attachedDso != null) { - getService().delete(c, attachedDso); - } - c.complete(); - } - - indexingService.commit(); - } - - + @Override protected ProcessService getService() { return processService; } + + @Override + public void delete(Context c, Process dso) throws Exception { + if (dso != null) { + getService().delete(c, dso); + } + } + + public static void deleteProcess(Integer integer) throws SQLException, IOException { + try (Context c = new Context()) { + c.turnOffAuthorisationSystem(); + Process process = processService.find(c, integer); + if (process != null) { + try { + 
processService.delete(c, process); + } catch (AuthorizeException e) { + // cannot occur, just wrap it to make the compiler happy + throw new RuntimeException(e); + } + } + c.complete(); + } + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/RelationshipBuilder.java b/dspace-api/src/test/java/org/dspace/builder/RelationshipBuilder.java similarity index 68% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/RelationshipBuilder.java rename to dspace-api/src/test/java/org/dspace/builder/RelationshipBuilder.java index f160b5c64d..773a4a8b8b 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/builder/RelationshipBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/RelationshipBuilder.java @@ -5,8 +5,9 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.builder; +package org.dspace.builder; +import java.io.IOException; import java.sql.SQLException; import org.apache.log4j.Logger; @@ -36,9 +37,26 @@ public class RelationshipBuilder extends AbstractBuilder byRelationshipType = relationshipService .findByRelationshipType(c, relationshipType); for (Relationship relationship : byRelationshipType) { relationshipService.delete(c, relationship); } + if (relationshipType != null) { + delete(c, relationshipType); + } c.complete(); + indexingService.commit(); } - delete(relationshipType); } + @Override + public void delete(Context c, RelationshipType dso) throws Exception { + if (dso != null) { + getService().delete(c,dso); + } + } + + @Override public RelationshipType build() { try { @@ -104,7 +117,7 @@ public class RelationshipTypeBuilder extends AbstractBuilder> map = new LinkedHashMap<>(); + private final LinkedHashMap> map + = new LinkedHashMap<>(); /** * Constructor that will initialize the Map with a predefined order for deletion */ public AbstractBuilderCleanupUtil() { + initMap(); + + } + + private void initMap() { map.put(RelationshipBuilder.class.getName(), new LinkedList<>()); map.put(RelationshipTypeBuilder.class.getName(), new LinkedList<>()); map.put(EntityTypeBuilder.class.getName(), new LinkedList<>()); @@ -63,7 +69,6 @@ public class AbstractBuilderCleanupUtil { map.put(MetadataSchemaBuilder.class.getName(), new LinkedList<>()); map.put(SiteBuilder.class.getName(), new LinkedList<>()); map.put(ProcessBuilder.class.getName(), new LinkedList<>()); - } /** @@ -89,4 +94,12 @@ public class AbstractBuilderCleanupUtil { } } } + + /** + * Clears and re-initialises the map of builders + */ + public void cleanupMap() { + this.map.clear(); + initMap(); + } } diff --git a/dspace-api/src/test/java/org/dspace/content/CommunityTest.java b/dspace-api/src/test/java/org/dspace/content/CommunityTest.java index 812060d019..74f8c20cf2 100644 --- a/dspace-api/src/test/java/org/dspace/content/CommunityTest.java +++ b/dspace-api/src/test/java/org/dspace/content/CommunityTest.java @@ -1013,7 +1013,8 @@ public class CommunityTest extends AbstractDSpaceObjectTest { equalTo(c)); assertThat("testGetAdminObject 1", (Community) communityService.getAdminObject(context, c, Constants.ADD), equalTo(c)); - assertThat("testGetAdminObject 2", communityService.getAdminObject(context, c, Constants.DELETE), nullValue()); + assertThat("testGetAdminObject 2", (Community) communityService.getAdminObject(context, c, Constants.DELETE), + equalTo(c)); assertThat("testGetAdminObject 3", (Community) communityService.getAdminObject(context, c, Constants.ADMIN), equalTo(c)); } diff --git 
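
The new withProcessStatus() and withStartAndEndTime() setters on ProcessBuilder make it possible to seed Process rows with a known state. A usage sketch, assuming a createProcess(...) factory in the style of the other builders (its exact signature is not shown in this diff); note the date strings must match the dd/MM/yyyy pattern parsed above.

    // Hypothetical test fragment: a completed process with fixed start/end dates.
    Process process = ProcessBuilder.createProcess(context, admin, "mock-script", parameters) // factory assumed
            .withProcessStatus(ProcessStatus.COMPLETED)
            .withStartAndEndTime("10/01/2019", "20/01/2019")  // parsed with SimpleDateFormat("dd/MM/yyyy")
            .build();

    // Later, if the test created the process outside the builder's tracking:
    ProcessBuilder.deleteProcess(process.getID());
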
a/dspace-api/src/test/java/org/dspace/content/ItemTest.java b/dspace-api/src/test/java/org/dspace/content/ItemTest.java index 8c3cfa5a04..01a2bea0bc 100644 --- a/dspace-api/src/test/java/org/dspace/content/ItemTest.java +++ b/dspace-api/src/test/java/org/dspace/content/ItemTest.java @@ -490,8 +490,8 @@ public class ItemTest extends AbstractDSpaceObjectTest { // Set the item to have two pieces of metadata for dc.type and dc2.type String dcType = "DC-TYPE"; String testType = "TEST-TYPE"; - itemService.addMetadata(context, it, "dc", "type", null, null, dcType, "accepted", 0); - itemService.addMetadata(context, it, "test", "type", null, null, testType, "accepted", 0); + itemService.addMetadata(context, it, "dc", "type", null, null, dcType); + itemService.addMetadata(context, it, "test", "type", null, null, testType); // Check that only one is returned when we ask for all dc.type values List values = itemService.getMetadata(it, "dc", "type", null, null); @@ -1598,8 +1598,8 @@ public class ItemTest extends AbstractDSpaceObjectTest { assertThat("testGetAdminObject 0", (Item) itemService.getAdminObject(context, it, Constants.REMOVE), equalTo(it)); assertThat("testGetAdminObject 1", (Item) itemService.getAdminObject(context, it, Constants.ADD), equalTo(it)); - assertThat("testGetAdminObject 2", (Collection) itemService.getAdminObject(context, it, Constants.DELETE), - equalTo(collection)); + assertThat("testGetAdminObject 2", (Item) itemService.getAdminObject(context, it, Constants.DELETE), + equalTo(it)); assertThat("testGetAdminObject 3", (Item) itemService.getAdminObject(context, it, Constants.ADMIN), equalTo(it)); } diff --git a/dspace-api/src/test/java/org/dspace/content/SiteTest.java b/dspace-api/src/test/java/org/dspace/content/SiteTest.java index 02e868e19b..8cc57410f1 100644 --- a/dspace-api/src/test/java/org/dspace/content/SiteTest.java +++ b/dspace-api/src/test/java/org/dspace/content/SiteTest.java @@ -14,13 +14,16 @@ import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.sql.SQLException; +import java.util.List; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.AbstractUnitTest; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.SiteService; import org.dspace.core.ConfigurationManager; import org.dspace.core.Constants; +import org.dspace.eperson.Group; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -143,4 +146,17 @@ public class SiteTest extends AbstractUnitTest { assertThat("testGetURL 0", s.getURL(), equalTo(ConfigurationManager.getProperty("dspace.ui.url"))); } + @Test + public void testAnonymousReadRights() throws Exception { + List groupList = authorizeService.getAuthorizedGroups(context, s, Constants.READ); + boolean foundAnonInList = false; + for (Group group : groupList) { + if (StringUtils.equalsIgnoreCase(group.getName(), "Anonymous")) { + foundAnonInList = true; + } + } + assertTrue(foundAnonInList); + + } + } diff --git a/dspace-api/src/test/java/org/dspace/content/authority/DSpaceControlledVocabularyTest.java b/dspace-api/src/test/java/org/dspace/content/authority/DSpaceControlledVocabularyTest.java index 0d431a5a5b..77cf105dd4 100644 --- a/dspace-api/src/test/java/org/dspace/content/authority/DSpaceControlledVocabularyTest.java +++ b/dspace-api/src/test/java/org/dspace/content/authority/DSpaceControlledVocabularyTest.java @@ -78,7 +78,7 @@ public class DSpaceControlledVocabularyTest extends AbstractDSpaceTest { String 
text = "north 40"; Collection collection = null; int start = 0; - int limit = 0; + int limit = 10; String locale = null; // This "farm" Controlled Vocab is included in TestEnvironment data // (under /src/test/data/dspaceFolder/) and it should be auto-loaded @@ -86,8 +86,7 @@ public class DSpaceControlledVocabularyTest extends AbstractDSpaceTest { DSpaceControlledVocabulary instance = (DSpaceControlledVocabulary) CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE), "farm"); assertNotNull(instance); - Choices result = instance.getMatches(field, text, collection, start, - limit, locale); + Choices result = instance.getMatches(text, start, limit, locale); assertEquals("the farm::north 40", result.values[0].value); } diff --git a/dspace-api/src/test/java/org/dspace/core/ContextTest.java b/dspace-api/src/test/java/org/dspace/core/ContextTest.java index f5697a72dc..0c29e053ec 100644 --- a/dspace-api/src/test/java/org/dspace/core/ContextTest.java +++ b/dspace-api/src/test/java/org/dspace/core/ContextTest.java @@ -130,7 +130,7 @@ public class ContextTest extends AbstractUnitTest { public void testGetCurrentLocale() { //NOTE: CurrentLocale is not initialized in AbstractUnitTest. So it should be DEFAULTLOCALE assertThat("testGetCurrentLocale 0", context.getCurrentLocale(), notNullValue()); - assertThat("testGetCurrentLocale 1", context.getCurrentLocale(), equalTo(I18nUtil.DEFAULTLOCALE)); + assertThat("testGetCurrentLocale 1", context.getCurrentLocale(), equalTo(I18nUtil.getDefaultLocale())); } /** diff --git a/dspace-api/src/test/java/org/dspace/curate/CurationTest.java b/dspace-api/src/test/java/org/dspace/curate/CurationTest.java new file mode 100644 index 0000000000..dadf131c38 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/curate/CurationTest.java @@ -0,0 +1,76 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.curate; + +import org.apache.commons.cli.ParseException; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.dspace.scripts.factory.ScriptServiceFactory; +import org.dspace.scripts.service.ScriptService; +import org.junit.Test; + +public class CurationTest extends AbstractIntegrationTestWithDatabase { + + @Test(expected = ParseException.class) + public void curationWithoutEPersonParameterTest() throws Exception { + + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .build(); + context.restoreAuthSystemState(); + String[] args = new String[] {"curate", "-t", CurationClientOptions.getTaskOptions().get(0), + "-i", collection.getHandle()}; + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService(); + ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(args[0]); + + DSpaceRunnable script = null; + if 
(scriptConfiguration != null) { + script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); + } + if (script != null) { + script.initialize(args, testDSpaceRunnableHandler, null); + script.run(); + } + } + + @Test + public void curationWithEPersonParameterTest() throws Exception { + + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .build(); + context.restoreAuthSystemState(); + String[] args = new String[] {"curate", "-e", "admin@email.com", "-t", + CurationClientOptions.getTaskOptions().get(0), "-i", collection.getHandle()}; + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService(); + ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(args[0]); + + DSpaceRunnable script = null; + if (scriptConfiguration != null) { + script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); + } + if (script != null) { + script.initialize(args, testDSpaceRunnableHandler, null); + script.run(); + } + } +} diff --git a/dspace-api/src/test/java/org/dspace/curate/CuratorTest.java b/dspace-api/src/test/java/org/dspace/curate/CuratorTest.java index 8ca6b6c172..0abb3b48ac 100644 --- a/dspace-api/src/test/java/org/dspace/curate/CuratorTest.java +++ b/dspace-api/src/test/java/org/dspace/curate/CuratorTest.java @@ -8,23 +8,27 @@ package org.dspace.curate; import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; import java.util.HashMap; import java.util.Map; import org.dspace.AbstractUnitTest; import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.SiteService; +import org.dspace.core.factory.CoreServiceFactory; +import org.dspace.ctask.general.NoOpCurationTask; import org.dspace.services.ConfigurationService; import org.junit.Test; /** - * * @author mhwood */ -public class CuratorTest - extends AbstractUnitTest { +public class CuratorTest extends AbstractUnitTest { + private static final SiteService SITE_SERVICE = ContentServiceFactory.getInstance().getSiteService(); static final String RUN_PARAMETER_NAME = "runParameter"; @@ -32,28 +36,32 @@ public class CuratorTest static final String TASK_PROPERTY_NAME = "taskProperty"; static final String TASK_PROPERTY_VALUE = "a property"; - /** Value of a known runtime parameter, if any. */ + /** + * Value of a known runtime parameter, if any. + */ static String runParameter; - /** Value of a known task property, if any. */ + /** + * Value of a known task property, if any. + */ static String taskProperty; /** * Test of curate method, of class Curator. * Currently this just tests task properties and run parameters. + * * @throws java.lang.Exception passed through. */ @Test - public void testCurate_DSpaceObject() - throws Exception { - System.out.println("curate"); + public void testCurate_DSpaceObject() throws Exception { + CoreServiceFactory.getInstance().getPluginService().clearNamedPluginClasses(); final String TASK_NAME = "dummyTask"; // Configure the task to be run. 
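
Both curation tests above repeat the same lookup-and-run sequence against the ScriptService. If that pattern grows, it could be folded into a small helper such as this hypothetical one (the helper name is illustrative and not part of the PR; the calls mirror the ones in the tests):

    // Hypothetical helper: resolve a script by its first argument and run it with the given handler.
    private void runDSpaceScript(String[] args, TestDSpaceRunnableHandler handler) throws Exception {
        ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService();
        ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(args[0]);
        if (scriptConfiguration == null) {
            return;
        }
        DSpaceRunnable script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration);
        if (script != null) {
            script.initialize(args, handler, null);  // third argument: current EPerson, none here
            script.run();
        }
    }
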
ConfigurationService cfg = kernelImpl.getConfigurationService(); cfg.setProperty("plugin.named.org.dspace.curate.CurationTask", - DummyTask.class.getName() + " = " + TASK_NAME); + DummyTask.class.getName() + " = " + TASK_NAME); cfg.setProperty(TASK_NAME + '.' + TASK_PROPERTY_NAME, TASK_PROPERTY_VALUE); // Get and configure a Curator. @@ -72,12 +80,40 @@ public class CuratorTest // Check the result. System.out.format("Task %s result was '%s'%n", - TASK_NAME, instance.getResult(TASK_NAME)); + TASK_NAME, instance.getResult(TASK_NAME)); System.out.format("Task %s status was %d%n", - TASK_NAME, instance.getStatus(TASK_NAME)); + TASK_NAME, instance.getStatus(TASK_NAME)); assertEquals("Unexpected task status", - Curator.CURATE_SUCCESS, instance.getStatus(TASK_NAME)); + Curator.CURATE_SUCCESS, instance.getStatus(TASK_NAME)); assertEquals("Wrong run parameter", RUN_PARAMETER_VALUE, runParameter); assertEquals("Wrong task property", TASK_PROPERTY_VALUE, taskProperty); } + + @Test + public void testCurate_NoOpTask() throws Exception { + + CoreServiceFactory.getInstance().getPluginService().clearNamedPluginClasses(); + + final String TASK_NAME = "noop"; + + // Configure the noop task to be run. + ConfigurationService cfg = kernelImpl.getConfigurationService(); + cfg.setProperty("plugin.named.org.dspace.curate.CurationTask", + NoOpCurationTask.class.getName() + " = " + TASK_NAME); + + // Get and configure a Curator. + Curator curator = new Curator(); + + StringBuilder reporterOutput = new StringBuilder(); + curator.setReporter(reporterOutput); // Send any report to our StringBuilder. + + curator.addTask(TASK_NAME); + Item item = mock(Item.class); + when(item.getType()).thenReturn(2); + when(item.getHandle()).thenReturn("testHandle"); + curator.curate(context, item); + + assertEquals(Curator.CURATE_SUCCESS, curator.getStatus(TASK_NAME)); + assertEquals("No operation performed on testHandle", reporterOutput.toString()); + } } diff --git a/dspace-api/src/test/java/org/dspace/discovery/MetadataFieldIndexFactoryImplTest.java b/dspace-api/src/test/java/org/dspace/discovery/MetadataFieldIndexFactoryImplTest.java new file mode 100644 index 0000000000..b54158c002 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/discovery/MetadataFieldIndexFactoryImplTest.java @@ -0,0 +1,93 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.discovery; + +import static org.junit.Assert.assertTrue; + +import org.apache.solr.common.SolrInputDocument; +import org.dspace.AbstractUnitTest; +import org.dspace.content.MetadataField; +import org.dspace.content.MetadataSchema; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.MetadataFieldService; +import org.dspace.content.service.MetadataSchemaService; +import org.dspace.discovery.indexobject.IndexableMetadataField; +import org.dspace.discovery.indexobject.MetadataFieldIndexFactoryImpl; +import org.junit.Test; + +/** + * Test class for {@link MetadataFieldIndexFactoryImpl} + * + * @author Maria Verdonck (Atmire) on 23/07/2020 + */ +public class MetadataFieldIndexFactoryImplTest extends AbstractUnitTest { + private MetadataSchemaService metadataSchemaService = + ContentServiceFactory.getInstance().getMetadataSchemaService(); + private MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService(); + + 
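
In testCurate_NoOpTask the mocked item returns 2 from getType(); that magic number is the DSpace object-type constant for items, so the stub could equally be written against Constants.ITEM for readability:

    import org.dspace.core.Constants;

    Item item = mock(Item.class);
    when(item.getType()).thenReturn(Constants.ITEM);  // Constants.ITEM == 2
    when(item.getHandle()).thenReturn("testHandle");
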
private String schemaName = "schema1"; + private String elemName1 = "elem1"; + private String elemName2 = "elem2"; + private String qualName1 = "qual1"; + + private MetadataSchema schema; + private MetadataField field1; + private MetadataField field2; + + @Test + public void test_buildDocument_withQualifier() throws Exception { + context.turnOffAuthorisationSystem(); + schema = metadataSchemaService.create(context, schemaName, "htpp://test/schema/"); + field1 = metadataFieldService.create(context, schema, elemName1, qualName1, "note 1"); + + MetadataFieldIndexFactoryImpl fieldIndexFactory = new MetadataFieldIndexFactoryImpl(); + IndexableMetadataField indexableMetadataField = new IndexableMetadataField(this.field1); + SolrInputDocument solrInputDocument = fieldIndexFactory.buildDocument(context, indexableMetadataField); + + assertTrue(solrInputDocument.getFieldValues(MetadataFieldIndexFactoryImpl.SCHEMA_FIELD_NAME + "_keyword") + .contains(this.field1.getMetadataSchema().getName())); + assertTrue(solrInputDocument.getFieldValues(MetadataFieldIndexFactoryImpl.ELEMENT_FIELD_NAME + "_keyword") + .contains(this.field1.getElement())); + assertTrue(solrInputDocument.getFieldValues(MetadataFieldIndexFactoryImpl.QUALIFIER_FIELD_NAME + "_keyword") + .contains(this.field1.getQualifier())); + + assertTrue(solrInputDocument.getFieldValues(MetadataFieldIndexFactoryImpl.FIELD_NAME_VARIATIONS + "_keyword") + .contains(this.field1.getQualifier())); + assertTrue(solrInputDocument.getFieldValues(MetadataFieldIndexFactoryImpl.FIELD_NAME_VARIATIONS + "_keyword") + .contains(this.field1.getElement() + "." + this.field1.getQualifier())); + assertTrue(solrInputDocument.getFieldValues(MetadataFieldIndexFactoryImpl.FIELD_NAME_VARIATIONS + "_keyword") + .contains(this.field1.toString('.'))); + + metadataSchemaService.delete(context, schema); + metadataFieldService.delete(context, field1); + context.restoreAuthSystemState(); + } + + @Test + public void test_buildDocument_noQualifier() throws Exception { + context.turnOffAuthorisationSystem(); + schema = metadataSchemaService.create(context, schemaName, "htpp://test/schema/"); + field2 = metadataFieldService.create(context, schema, elemName2, null, "note 2"); + MetadataFieldIndexFactoryImpl fieldIndexFactory = new MetadataFieldIndexFactoryImpl(); + IndexableMetadataField indexableMetadataField = new IndexableMetadataField(this.field2); + SolrInputDocument solrInputDocument = fieldIndexFactory.buildDocument(context, indexableMetadataField); + assertTrue(solrInputDocument.getFieldValues(MetadataFieldIndexFactoryImpl.SCHEMA_FIELD_NAME + "_keyword") + .contains(this.field2.getMetadataSchema().getName())); + assertTrue(solrInputDocument.getFieldValues(MetadataFieldIndexFactoryImpl.ELEMENT_FIELD_NAME + "_keyword") + .contains(this.field2.getElement())); + + assertTrue(solrInputDocument.getFieldValues(MetadataFieldIndexFactoryImpl.FIELD_NAME_VARIATIONS + "_keyword") + .contains(this.field2.getElement())); + assertTrue(solrInputDocument.getFieldValues(MetadataFieldIndexFactoryImpl.FIELD_NAME_VARIATIONS + "_keyword") + .contains(this.field2.toString('.'))); + + metadataSchemaService.delete(context, schema); + metadataFieldService.delete(context, field2); + context.restoreAuthSystemState(); + } +} diff --git a/dspace-api/src/test/java/org/dspace/discovery/MockSolrSearchCore.java b/dspace-api/src/test/java/org/dspace/discovery/MockSolrSearchCore.java index 1934ba9f0f..b81e18a473 100644 --- a/dspace-api/src/test/java/org/dspace/discovery/MockSolrSearchCore.java +++ 
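
The assertions in MetadataFieldIndexFactoryImplTest amount to a check on the shape of the Solr document that buildDocument() produces for schema1.elem1.qual1. Roughly, and only as an illustration of the *_keyword fields being verified (field-name constants taken from the test above, values assumed from the schema/element/qualifier used there):

    // Illustrative only: the kind of document the qualified-field assertions expect.
    SolrInputDocument doc = new SolrInputDocument();
    doc.addField(MetadataFieldIndexFactoryImpl.SCHEMA_FIELD_NAME + "_keyword", "schema1");
    doc.addField(MetadataFieldIndexFactoryImpl.ELEMENT_FIELD_NAME + "_keyword", "elem1");
    doc.addField(MetadataFieldIndexFactoryImpl.QUALIFIER_FIELD_NAME + "_keyword", "qual1");
    doc.addField(MetadataFieldIndexFactoryImpl.FIELD_NAME_VARIATIONS + "_keyword", "qual1");
    doc.addField(MetadataFieldIndexFactoryImpl.FIELD_NAME_VARIATIONS + "_keyword", "elem1.qual1");
    doc.addField(MetadataFieldIndexFactoryImpl.FIELD_NAME_VARIATIONS + "_keyword", "schema1.elem1.qual1");
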
b/dspace-api/src/test/java/org/dspace/discovery/MockSolrSearchCore.java @@ -7,19 +7,35 @@ */ package org.dspace.discovery; +import org.dspace.solr.MockSolrServer; +import org.springframework.beans.factory.DisposableBean; import org.springframework.beans.factory.InitializingBean; import org.springframework.stereotype.Service; /** - * Mock SOLR service for the Search Core + * Mock SOLR service for the Search Core. Manages an in-process Solr server + * with an in-memory "search" core. */ @Service -public class MockSolrSearchCore extends SolrSearchCore implements InitializingBean { +public class MockSolrSearchCore extends SolrSearchCore + implements InitializingBean, DisposableBean { + private MockSolrServer mockSolrServer; @Override public void afterPropertiesSet() throws Exception { - //We don't use SOLR in the tests of this module - solr = null; + mockSolrServer = new MockSolrServer("search"); + solr = mockSolrServer.getSolrServer(); } + /** + * Reset the core for the next test. See {@link MockSolrServer#reset()}. + */ + public void reset() { + mockSolrServer.reset(); + } + + @Override + public void destroy() throws Exception { + mockSolrServer.destroy(); + } } diff --git a/dspace-api/src/test/java/org/dspace/eperson/EPersonInWorkflowIT.java b/dspace-api/src/test/java/org/dspace/eperson/EPersonInWorkflowIT.java new file mode 100644 index 0000000000..90b094ccbc --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/eperson/EPersonInWorkflowIT.java @@ -0,0 +1,1558 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.sql.SQLException; +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.Logger; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; +import org.dspace.builder.WorkspaceItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.WorkspaceItem; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.InstallItemService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.WorkspaceItemService; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; +import org.dspace.eperson.service.GroupService; +import org.dspace.workflow.WorkflowService; +import org.dspace.workflow.factory.WorkflowServiceFactory; +import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory; +import org.dspace.xmlworkflow.service.XmlWorkflowService; +import org.dspace.xmlworkflow.state.Workflow; +import org.dspace.xmlworkflow.storedcomponents.CollectionRole; +import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; +import org.dspace.xmlworkflow.storedcomponents.service.CollectionRoleService; +import org.junit.Before; +import org.junit.Test; +import 
org.springframework.mock.web.MockHttpServletRequest; + +/** + * Class to test interaction between EPerson deletion and tasks present in the workflow + */ +public class EPersonInWorkflowIT extends AbstractIntegrationTestWithDatabase { + + private final String REVIEW_STEP = "reviewstep"; + private final String CLAIM_ACTION = "claimaction"; + private final String REVIEW_ACTION = "reviewaction"; + private final String REVIEW_ROLE = "reviewer"; + private final String EDIT_STEP = "editstep"; + private final String EDIT_ACTION = "editaction"; + private final String FINAL_EDIT_ROLE = "finaleditor"; + private final String FINAL_EDIT_STEP = "finaleditstep"; + private final String FINAL_EDIT_ACTION = "finaleditaction"; + private final String EDIT_ROLE = "editor"; + protected EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); + protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); + protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); + protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + protected InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService(); + protected WorkflowService workflowService = WorkflowServiceFactory.getInstance().getWorkflowService(); + protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance() + .getWorkspaceItemService(); + protected XmlWorkflowService xmlWorkflowService = XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService(); + protected CollectionRoleService collectionRoleService = XmlWorkflowServiceFactory.getInstance() + .getCollectionRoleService(); + + + private EPerson workflowUserA; + private EPerson workflowUserB; + private EPerson workflowUserC; + private EPerson workflowUserD; + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(EPersonInWorkflowIT.class); + + /** + * This method will be run before every test as per @Before. It will + * initialize resources required for the tests. + * + * Other methods can be annotated with @Before here or in subclasses but no + * execution order is guaranteed + */ + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + + context.turnOffAuthorisationSystem(); + + workflowUserA = EPersonBuilder.createEPerson(context).withEmail("workflowUserA@example.org").build(); + workflowUserB = EPersonBuilder.createEPerson(context).withEmail("workflowUserB@example.org").build(); + workflowUserC = EPersonBuilder.createEPerson(context).withEmail("workflowUserC@example.org").build(); + workflowUserD = EPersonBuilder.createEPerson(context).withEmail("workflowUserD@example.org").build(); + + context.restoreAuthSystemState(); + + } + + + /** + * This test verifies that an EPerson cannot be removed if they are the only member of a Workflow Group that has + * tasks currently assigned to it. This test verifies this with the task claimed by the user to be deleted. + * This test also verifies that after the task has been passed and the user has been removed from the workflow + * group, the EPerson can be removed. 
+ * + * @throws Exception + */ + @Test + public void testDeleteUserWhenOnlyUserInGroup1() throws Exception { + /* + * This test has the following setup: + * - Step 1: user B + * - Step 2: user C + * - Step 3: user B + * + * This test will perform the following checks: + * - create a workspace item, and let it move to step 1 + * - claim it by user B + * - delete user B + * - verify the delete is refused + * - remove user B from step 1 + * - verify that the removal is refused due to B being the last member in the workflow group and the group + * having a claimed item + * - approve it by user B and let it move to step 2 + * - remove user B from step 3 + * - approve it by user C + * - verify that the item is archived without any actions apart from removing user B + * - delete user B + * - verify the delete succeeds + */ + context.turnOffAuthorisationSystem(); + + Community parent = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, parent) + .withWorkflowGroup(1, workflowUserB) + .withWorkflowGroup(2, workflowUserC) + .withWorkflowGroup(3, workflowUserB) + .build(); + + WorkspaceItem wsi = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withSubmitter(workflowUserA) + .withTitle("Test item full workflow") + .withIssueDate("2019-03-06") + .withSubject("ExtraEntry") + .build(); + + Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(collection); + + XmlWorkflowItem workflowItem = xmlWorkflowService.startWithoutNotify(context, wsi); + MockHttpServletRequest httpServletRequest = new MockHttpServletRequest(); + httpServletRequest.setParameter("submit_approve", "submit_approve"); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION); + assertRemovalOfEpersonFromWorkflowGroup(workflowUserB, collection, REVIEW_ROLE, false); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, REVIEW_ACTION); + + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, CLAIM_ACTION); + + + assertDeletionOfEperson(workflowUserB, false); + + assertRemovalOfEpersonFromWorkflowGroup(workflowUserB, collection, FINAL_EDIT_ROLE, true); + assertRemovalOfEpersonFromWorkflowGroup(workflowUserB, collection, REVIEW_ROLE, true); + + assertDeletionOfEperson(workflowUserB, true); + + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, EDIT_ACTION); + + assertTrue(workflowItem.getItem().isArchived()); + + } + + /** + * This test verifies that an EPerson cannot be removed if they are the only member of a Workflow Group that has + * tasks currently assigned to it. This test verifies this with a pooled task. + * This test also verifies that after the task has been passed and the user has been removed from the workflow + * group, the EPerson can be removed. 
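
The workflow tests lean on two assertion helpers, assertDeletionOfEperson and assertRemovalOfEpersonFromWorkflowGroup, whose bodies fall outside this excerpt. For orientation only, a plausible shape for the first is sketched below; it assumes the EPerson service signals a refused delete by throwing, which is an assumption of this sketch and not necessarily the PR's actual implementation.

    // Sketch only: expect ePersonService.delete() to succeed or throw, depending on workflow state.
    private void assertDeletionOfEperson(EPerson ePerson, boolean expectedToSucceed) throws Exception {
        boolean deleted;
        try {
            ePersonService.delete(context, ePerson);
            deleted = true;
        } catch (Exception e) {
            deleted = false;
        }
        if (expectedToSucceed) {
            assertTrue(deleted);
        } else {
            assertFalse(deleted);
        }
    }
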
+ * + * @throws Exception + */ + @Test + public void testDeleteUserWhenOnlyUserInGroup2() throws Exception { + /* + * This test has the following setup: + * - Step 1: user B + * - Step 2: user C + * - Step 3: user B + * + * This test will perform the following checks: + * - create a workspace item, and let it move to step 1 + * - delete user B + * - verify the delete is refused + * - remove user B from step 1 + * - verify that the removal is refused due to B being the last member in the workflow group and the group + * having a pool task + * - approve it by user B and let it move to step 2 + * - remove user B from step 3 + * - delete user B + * - verify the delete succeeds + * - Approve it by user C + * - verify that the item is archived without any actions apart from the approving in step 2 + */ + context.turnOffAuthorisationSystem(); + + Community parent = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, parent) + .withWorkflowGroup(1, workflowUserB) + .withWorkflowGroup(2, workflowUserC) + .withWorkflowGroup(3, workflowUserB) + .build(); + + WorkspaceItem wsi = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withSubmitter(workflowUserA) + .withTitle("Test item full workflow") + .withIssueDate("2019-03-06") + .withSubject("ExtraEntry") + .build(); + + Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(collection); + + XmlWorkflowItem workflowItem = xmlWorkflowService.startWithoutNotify(context, wsi); + MockHttpServletRequest httpServletRequest = new MockHttpServletRequest(); + httpServletRequest.setParameter("submit_approve", "submit_approve"); + + assertDeletionOfEperson(workflowUserB, false); + assertRemovalOfEpersonFromWorkflowGroup(workflowUserB, collection, REVIEW_ROLE, false); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, REVIEW_ACTION); + + assertRemovalOfEpersonFromWorkflowGroup(workflowUserB, collection, REVIEW_ROLE, true); + assertRemovalOfEpersonFromWorkflowGroup(workflowUserB, collection, "finaleditor", true); + + assertDeletionOfEperson(workflowUserB, true); + + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, EDIT_ACTION); + + assertTrue(workflowItem.getItem().isArchived()); + + } + + + /** + * This test verifies that an EPerson cannot be removed if they are the only member of a Workflow Group that has + * tasks currently assigned to it. This test verifies this with a group without a task. + * This test also verifies that after user has been removed from the workflow + * group, the EPerson can be removed. 
+ * + * @throws Exception + */ + @Test + public void testDeleteUserWhenOnlyUserInGroup3() throws Exception { + /* + * This test has the following setup: + * - Step 1: user B + * - Step 2: user C + * - Step 3: user B + * + * This test will perform the following checks: + * - create a workspace item, and let it move to step 1 + * - delete user C + * - verify the delete is refused + * - remove user C from step 2 + * - delete user C + * - verify the delete succeeds + * - Approve it by user B + * - verify that the item moved to step 3 without any actions apart from the approving in step 1 + * - Approve it by user B + * - verify that the item is archived + */ + context.turnOffAuthorisationSystem(); + + Community parent = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, parent) + .withWorkflowGroup(1, workflowUserB) + .withWorkflowGroup(2, workflowUserC) + .withWorkflowGroup(3, workflowUserB) + .build(); + + WorkspaceItem wsi = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withSubmitter(workflowUserA) + .withTitle("Test item full workflow") + .withIssueDate("2019-03-06") + .withSubject("ExtraEntry") + .build(); + + Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(collection); + + XmlWorkflowItem workflowItem = xmlWorkflowService.startWithoutNotify(context, wsi); + MockHttpServletRequest httpServletRequest = new MockHttpServletRequest(); + httpServletRequest.setParameter("submit_approve", "submit_approve"); + + assertDeletionOfEperson(workflowUserC, false); + + assertRemovalOfEpersonFromWorkflowGroup(workflowUserC, collection, EDIT_ROLE, true); + + assertDeletionOfEperson(workflowUserC, true); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, REVIEW_ACTION); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, FINAL_EDIT_STEP, + CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, FINAL_EDIT_STEP, + FINAL_EDIT_ACTION); + + assertTrue(workflowItem.getItem().isArchived()); + + } + + /** + * This test verifies that an EPerson cannot be removed if they are the only member of a Workflow Group that has + * tasks currently assigned to it. This test verifies a user can't be removed from a workflow step they have claimed + * items for that task. This test also verifies that the user can be removed from another workflow group where + * they have no claimed items for that task. This test also verifies that after user has performed the task, and the + * user has been removed from the workflow group, the EPerson can be removed. 
+ * + * @throws Exception + */ + @Test + public void testDeleteUserWhenOnlyUserInGroup4() throws Exception { + /* + * This test has the following setup: + * - Step 1: user B + * - Step 2: user C + * - Step 3: user B + * + * This test will perform the following checks: + * - create a workspace item, and let it move to step 1 + * - approve it by user B, and let it move to step 2 + * - approve it by user C, and let it move to step 3 + * - claim it by user B + * - remove user B from step 1 + * - delete user B + * - verify the delete is refused + * - remove user B from step 3, verify that the removal is refused due to user B having a claimed task and there + * being no other members in step 3 + * - approve it by user B + * - delete user B + * - verify the delete suceeds + * - verify that the item is archived + */ + context.turnOffAuthorisationSystem(); + + Community parent = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, parent) + .withWorkflowGroup(1, workflowUserB) + .withWorkflowGroup(2, workflowUserC) + .withWorkflowGroup(3, workflowUserB) + .build(); + + WorkspaceItem wsi = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withSubmitter(workflowUserA) + .withTitle("Test item full workflow") + .withIssueDate("2019-03-06") + .withSubject("ExtraEntry") + .build(); + + Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(collection); + + XmlWorkflowItem workflowItem = xmlWorkflowService.startWithoutNotify(context, wsi); + MockHttpServletRequest httpServletRequest = new MockHttpServletRequest(); + httpServletRequest.setParameter("submit_approve", "submit_approve"); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, REVIEW_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, EDIT_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, FINAL_EDIT_STEP, CLAIM_ACTION); + + assertDeletionOfEperson(workflowUserB, false); + assertRemovalOfEpersonFromWorkflowGroup(workflowUserB, collection, REVIEW_ROLE, true); + assertRemovalOfEpersonFromWorkflowGroup(workflowUserB, collection, FINAL_EDIT_ROLE, false); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, FINAL_EDIT_STEP, + FINAL_EDIT_ACTION); + + assertRemovalOfEpersonFromWorkflowGroup(workflowUserB, collection, FINAL_EDIT_ROLE, true); + assertDeletionOfEperson(workflowUserB, true); + + assertTrue(workflowItem.getItem().isArchived()); + + } + + /** + * This test verifies that an EPerson cannot be removed if they are the only member of a Workflow Group that has + * tasks currently assigned to it. This test verifies a user can't be removed from a workflow step they have claimed + * items for that task. This test also verifies that this verification is using both the step and the collection + * to determine whether the user can be removed from a workflow group. This test also verifies that after user has + * been removed from the workflow group and the task has been passed, the EPerson can be removed. 
+ * + * @throws Exception + */ + @Test + public void testDeleteUserWhenOnlyUserInGroup5() throws Exception { + /* + * This test has the following setup: + * - Collection A - Step 1: user B + * - Collection A - Step 2: user C + * - Collection A - Step 3: user B + * + * - Collection B - Step 1: user B + * + * This test will perform the following checks: + * - create a workspace item in Collection A, and let it move to step 1 + * - claim it by user B + * - delete user B + * - verify the delete is refused + * - remove user B from Col A - step 3 + * - remove user B from Col B - step 1 + * - remove user B from Col A - step 1 + * - Verify that the removal from Col A - step 1 is refused because user B has a claimed task in that + * collection and no other user is present + * - approve it by user B, and let it move to step 2 + * - remove user B from Col A - step 1 + * - verify it succeeds + * - delete user B + * - verify it succeeds + * - approve it by user C + * - verify that the item is archived + */ + context.turnOffAuthorisationSystem(); + + Community parent = CommunityBuilder.createCommunity(context).build(); + Collection collectionA = CollectionBuilder.createCollection(context, parent) + .withWorkflowGroup(1, workflowUserB) + .withWorkflowGroup(2, workflowUserC) + .withWorkflowGroup(3, workflowUserB) + .build(); + + Collection collectionB = CollectionBuilder.createCollection(context, parent) + .withWorkflowGroup(1, workflowUserB) + .build(); + + WorkspaceItem wsi = WorkspaceItemBuilder.createWorkspaceItem(context, collectionA) + .withSubmitter(workflowUserA) + .withTitle("Test item full workflow") + .withIssueDate("2019-03-06") + .withSubject("ExtraEntry") + .build(); + + Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(collectionA); + + XmlWorkflowItem workflowItem = xmlWorkflowService.startWithoutNotify(context, wsi); + MockHttpServletRequest httpServletRequest = new MockHttpServletRequest(); + httpServletRequest.setParameter("submit_approve", "submit_approve"); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION); + + assertRemovalOfEpersonFromWorkflowGroup(workflowUserB, collectionA, FINAL_EDIT_ROLE, true); + assertRemovalOfEpersonFromWorkflowGroup(workflowUserB, collectionB, REVIEW_ROLE, true); + assertRemovalOfEpersonFromWorkflowGroup(workflowUserB, collectionA, REVIEW_ROLE, false); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, REVIEW_ACTION); + + assertRemovalOfEpersonFromWorkflowGroup(workflowUserB, collectionA, REVIEW_ROLE, true); + assertDeletionOfEperson(workflowUserB, true); + + + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, EDIT_ACTION); + + assertTrue(workflowItem.getItem().isArchived()); + + } + + /** + * This test verifies that the submitter can be removed, and the workflow steps will still be supported + * if there's no submitter assigned to the item + * + * @throws Exception + */ + @Test + public void testDeleteUserWhenOnlyUserInGroup6() throws Exception { + /* + * This test has the following setup: + * - Submitter: user A + * - Step 1: user B + * - Step 2: user C + * - Step 3: user B + * + * - create a workspace item, and let it move to step 1 + * - delete the submitter + * - verify it succeeds + * - Approve it by user B + * - verify that the item moved to step 2 + * - 
Approve it by user C
+         * - verify that the item moved to step 3
+         * - Approve it by user B
+         * - verify that the item is archived
+         */
+        context.turnOffAuthorisationSystem();
+
+        Community parent = CommunityBuilder.createCommunity(context).build();
+        Collection collection = CollectionBuilder.createCollection(context, parent)
+                                                 .withWorkflowGroup(1, workflowUserB)
+                                                 .withWorkflowGroup(2, workflowUserC)
+                                                 .withWorkflowGroup(3, workflowUserB)
+                                                 .build();
+
+        WorkspaceItem wsi = WorkspaceItemBuilder.createWorkspaceItem(context, collection)
+                                                .withSubmitter(workflowUserA)
+                                                .withTitle("Test item full workflow")
+                                                .withIssueDate("2019-03-06")
+                                                .withSubject("ExtraEntry")
+                                                .build();
+
+        Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(collection);
+
+        XmlWorkflowItem workflowItem = xmlWorkflowService.startWithoutNotify(context, wsi);
+        MockHttpServletRequest httpServletRequest = new MockHttpServletRequest();
+        httpServletRequest.setParameter("submit_approve", "submit_approve");
+
+        assertDeletionOfEperson(workflowUserA, true);
+
+        executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION);
+        executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, REVIEW_ACTION);
+
+        executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, CLAIM_ACTION);
+        executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, EDIT_ACTION);
+
+        executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, FINAL_EDIT_STEP, CLAIM_ACTION);
+        executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, FINAL_EDIT_STEP,
+                              FINAL_EDIT_ACTION);
+
+        assertTrue(workflowItem.getItem().isArchived());
+
+    }
+
+    /**
+     * This test verifies that an EPerson cannot be removed if they are the only member of a Workflow Group that has
+     * tasks currently assigned to it. This test also verifies the user can't be removed from a step with a pooled
+     * task if they are the only member, and that the user can be removed from a step with no tasks even if they are
+     * the only member. It also verifies that after the task has been passed and the user has been removed from the
+     * workflow, the EPerson can be removed, and that an item is correctly archived if the last step has no members
+     * left.
+ * + * @throws Exception + */ + @Test + public void testDeleteUserWhenOnlyUserInGroup7() throws Exception { + /* + * This test has the following setup: + * - Step 1: user B + * - Step 2: user C + * - Step 3: user B + * + * This test will perform the following checks: + * - create a workspace item, and let it move to step 1 + * - delete user B + * - verify the delete is refused + * - remove user B from step 1 + * - verify the removal is refused + * - remove user B from step 3 + * - verify the removal succeeds + * - approve it by user B + * - verify that the item moved to step 2 + * - remove user B from step 1 + * - delete user B + * - verify the delete succeeds + * - approve it by user C + * - verify that the item is archived without any actions apart from removing user B + */ + context.turnOffAuthorisationSystem(); + + Community parent = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, parent) + .withWorkflowGroup(1, workflowUserB) + .withWorkflowGroup(2, workflowUserC) + .withWorkflowGroup(3, workflowUserB) + .build(); + + WorkspaceItem wsi = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withSubmitter(workflowUserA) + .withTitle("Test item full workflow") + .withIssueDate("2019-03-06") + .withSubject("ExtraEntry") + .build(); + + Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(collection); + + XmlWorkflowItem workflowItem = xmlWorkflowService.startWithoutNotify(context, wsi); + MockHttpServletRequest httpServletRequest = new MockHttpServletRequest(); + httpServletRequest.setParameter("submit_approve", "submit_approve"); + + assertDeletionOfEperson(workflowUserB, false); + assertRemovalOfEpersonFromWorkflowGroup(workflowUserB, collection, REVIEW_ROLE, false); + assertRemovalOfEpersonFromWorkflowGroup(workflowUserB, collection, FINAL_EDIT_ROLE, true); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, REVIEW_ACTION); + + assertRemovalOfEpersonFromWorkflowGroup(workflowUserB, collection, REVIEW_ROLE, true); + assertDeletionOfEperson(workflowUserB, true); + + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, EDIT_ACTION); + + assertTrue(workflowItem.getItem().isArchived()); + + } + + + /** + * This test verifies that an EPerson cannot be removed if they are the only member of a Workflow Group that has + * tasks currently assigned to it. This test verifies this with a pooled task in the last workflow step. + * This test also verifies that after after another user has been added to the workflow groups, the original EPerson + * can be removed. 
+ * + * @throws Exception + */ + @Test + public void testDeleteUserAfterReplacingUser1() throws Exception { + /* + * This test has the following setup: + * - Step 1: user B + * - Step 2: user C + * - Step 3: user B + * + * This test will perform the following checks: + * - create a workspace item, and let it move to step 1 + * - approve it by user B + * - verify that the item moved to step 2 + * - Approve it by user C + * - delete user B + * - verify the delete is refused + * - add user D to workflow step 3 + * - delete user B + * - verify the delete is refused + * - add user D to workflow step 1 + * - delete user B + * - verify the delete succeeds + * - Approve it by user D + * - verify that the item is archived + */ + context.turnOffAuthorisationSystem(); + + Community parent = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, parent) + .withWorkflowGroup(1, workflowUserB) + .withWorkflowGroup(2, workflowUserC) + .withWorkflowGroup(3, workflowUserB) + .build(); + + WorkspaceItem wsi = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withSubmitter(workflowUserA) + .withTitle("Test item full workflow") + .withIssueDate("2019-03-06") + .withSubject("ExtraEntry") + .build(); + + Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(collection); + + XmlWorkflowItem workflowItem = xmlWorkflowService.startWithoutNotify(context, wsi); + MockHttpServletRequest httpServletRequest = new MockHttpServletRequest(); + httpServletRequest.setParameter("submit_approve", "submit_approve"); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, REVIEW_ACTION); + + + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, EDIT_ACTION); + + assertDeletionOfEperson(workflowUserB, false); + + addUserToWorkflowGroup(workflowUserD, collection, FINAL_EDIT_ROLE); + assertDeletionOfEperson(workflowUserB, false); + addUserToWorkflowGroup(workflowUserD, collection, REVIEW_ROLE); + + assertDeletionOfEperson(workflowUserB, true); + + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, FINAL_EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, FINAL_EDIT_STEP, + FINAL_EDIT_ACTION); + + assertTrue(workflowItem.getItem().isArchived()); + + } + + /** + * This test verifies that an EPerson cannot be removed if they are the only member of a Workflow Group that has + * tasks currently assigned to it. This test verifies this with a pooled task at the beginning of the workflow. + * This test also verifies that after after another user has been added to the workflow groups from which the + * original user is being removed, the EPerson can be removed and the workflow process can be resumed with the newly + * added user. 
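+     * <p>
+     * Replacement users are added through the {@code addUserToWorkflowGroup} helper below, which looks up the
+     * collection role matching the given role name and adds the EPerson to that role's group, e.g.
+     * {@code addUserToWorkflowGroup(workflowUserD, collection, REVIEW_ROLE);}.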
+ * + * @throws Exception + */ + @Test + public void testDeleteUserAfterReplacingUser2() throws Exception { + /* + * This test has the following setup: + * - Step 1: user B + * - Step 2: user C + * - Step 3: user B + * + * This test will perform the following checks: + * - create a workspace item, and let it move to step 1 + * - delete user B + * - verify the delete is refused + * - add user D to workflow step 1 + * - add user D to workflow step 3 + * - delete user B + * - verify the delete succeeds + * - Approve it by user D + * - verify that the item moved to step 2 + * - Approve it by user C + * - Approve it by user D + * - verify that the item is archived + */ + context.turnOffAuthorisationSystem(); + + Community parent = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, parent) + .withWorkflowGroup(1, workflowUserB) + .withWorkflowGroup(2, workflowUserC) + .withWorkflowGroup(3, workflowUserB) + .build(); + + WorkspaceItem wsi = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withSubmitter(workflowUserA) + .withTitle("Test item full workflow") + .withIssueDate("2019-03-06") + .withSubject("ExtraEntry") + .build(); + + Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(collection); + + XmlWorkflowItem workflowItem = xmlWorkflowService.startWithoutNotify(context, wsi); + MockHttpServletRequest httpServletRequest = new MockHttpServletRequest(); + httpServletRequest.setParameter("submit_approve", "submit_approve"); + + assertDeletionOfEperson(workflowUserB, false); + addUserToWorkflowGroup(workflowUserD, collection, REVIEW_ROLE); + addUserToWorkflowGroup(workflowUserD, collection, FINAL_EDIT_ROLE); + assertDeletionOfEperson(workflowUserB, true); + + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, REVIEW_STEP, REVIEW_ACTION); + + + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, EDIT_ACTION); + + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, FINAL_EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, FINAL_EDIT_STEP, + FINAL_EDIT_ACTION); + + assertTrue(workflowItem.getItem().isArchived()); + + } + + /** + * This test verifies that an EPerson cannot be removed if they are the only member of a Workflow Group that has an + * item present in a workflow with a pooled task. This test verifies this with an item that has entered the workflow + * and is still to progress to the step where the user will be removed. + * This test also verifies that after a new user has been added to this step, the original user can be removed. This + * test then verifies that the item can proceed through the full workflow and is correctly archived at the end. 
+ * + * @throws Exception + */ + @Test + public void testDeleteUserAfterReplacingUser3() throws Exception { + /* + * This test has the following setup: + * - Step 1: user B + * - Step 2: user C + * - Step 3: user B + * + * This test will perform the following checks: + * - create a workspace item, and let it move to step 1 + * - delete user C + * - verify the delete is refused + * - add user D to workflow step 2 + * - delete user C + * - verify the delete succeeds + * - Approve it by user B + * - verify that the item moved to step 2 + * - Approve it by user D + * - verify that the item moved to step 3 + * - Approve it by user B + * - verify that the item is archived + */ + context.turnOffAuthorisationSystem(); + + Community parent = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, parent) + .withWorkflowGroup(1, workflowUserB) + .withWorkflowGroup(2, workflowUserC) + .withWorkflowGroup(3, workflowUserB) + .build(); + + WorkspaceItem wsi = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withSubmitter(workflowUserA) + .withTitle("Test item full workflow") + .withIssueDate("2019-03-06") + .withSubject("ExtraEntry") + .build(); + + Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(collection); + + XmlWorkflowItem workflowItem = xmlWorkflowService.startWithoutNotify(context, wsi); + MockHttpServletRequest httpServletRequest = new MockHttpServletRequest(); + httpServletRequest.setParameter("submit_approve", "submit_approve"); + + assertDeletionOfEperson(workflowUserC, false); + addUserToWorkflowGroup(workflowUserD, collection, EDIT_ROLE); + assertDeletionOfEperson(workflowUserC, true); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, REVIEW_ACTION); + + + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, EDIT_STEP, EDIT_ACTION); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, FINAL_EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, FINAL_EDIT_STEP, + FINAL_EDIT_ACTION); + + assertTrue(workflowItem.getItem().isArchived()); + + } + + /** + * This test verifies that an EPerson cannot be removed if they are the only member of a Workflow Group that has an + * item present in a workflow with a pooled task. This test verifies this with a claimed task in the first workflow + * step. + * This test also verifies that after another user has been added to the respective workflow groups, the original + * user can be deleted. The claimed task will then become available again in the workflow pool where the new user + * can claim it and approve it. + * This test will verify that the remainder of the workflow can be completed. 
+ * + * @throws Exception + */ + @Test + public void testDeleteUserAfterReplacingUser4() throws Exception { + /* + * This test has the following setup: + * - Step 1: user B + * - Step 2: user C + * - Step 3: user B + * + * This test will perform the following checks: + * - create a workspace item, and let it move to step 1 + * - claim it by user B, but don’t approve it + * - delete user B + * - verify the delete is refused + * - add user D to workflow step 1 + * - add user D to workflow step 3 + * - delete user B + * - verify the delete succeeds + * - Verify user D can now claim and approve it + * - verify that the item moved to step 2 + * - claim it by user C + * - approve it by user C + * - verify that the item moved to step 3 + * - Verify user D can claim and approve it + * - verify that the item is archived successfully + */ + context.turnOffAuthorisationSystem(); + + Community parent = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, parent) + .withWorkflowGroup(1, workflowUserB) + .withWorkflowGroup(2, workflowUserC) + .withWorkflowGroup(3, workflowUserB) + .build(); + + WorkspaceItem wsi = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withSubmitter(workflowUserA) + .withTitle("Test item full workflow") + .withIssueDate("2019-03-06") + .withSubject("ExtraEntry") + .build(); + + Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(collection); + + XmlWorkflowItem workflowItem = xmlWorkflowService.startWithoutNotify(context, wsi); + MockHttpServletRequest httpServletRequest = new MockHttpServletRequest(); + httpServletRequest.setParameter("submit_approve", "submit_approve"); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION); + + assertDeletionOfEperson(workflowUserB, false); + addUserToWorkflowGroup(workflowUserD, collection, REVIEW_ROLE); + addUserToWorkflowGroup(workflowUserD, collection, FINAL_EDIT_ROLE); + assertDeletionOfEperson(workflowUserB, true); + + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, REVIEW_STEP, REVIEW_ACTION); + + + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, EDIT_ACTION); + + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, FINAL_EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, FINAL_EDIT_STEP, + FINAL_EDIT_ACTION); + + assertTrue(workflowItem.getItem().isArchived()); + + } + + /** + * This test verifies that an EPerson cannot be removed if they are the only member of a Workflow Group that has + * tasks currently assigned to it. This test verifies this with a claimed task in the middle of the workflow by the + * user to be deleted. + * This test also verifies that after another user is added to the middle workflow step, the original user can be + * deleted and that the task will become available in the pool tasks of the new user. + * This test then verifies that the workflow can be progressed by the new user and completed through the final step, + * and that the item will be archived. 
+ * + * @throws Exception + */ + @Test + public void testDeleteUserAfterReplacingUser5() throws Exception { + /* + * This test has the following setup: + * - Step 1: user B + * - Step 2: user C + * - Step 3: user B + * + * This test will perform the following checks: + * - create a workspace item, and let it move to step 1 + * - Approve it by user B + * - verify that the item moved to step 2 + * - claim it by user C, but don’t approve it + * - delete user C + * - verify the delete is refused + * - add user D to workflow step 2 + * - delete user C + * - verify the delete succeeds + * - Verify user D can now claim and approve it + * - verify that the item moved to step 3 + * - verify that user B can claim and approve it + * - verify that the item is archived + */ + context.turnOffAuthorisationSystem(); + + Community parent = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, parent) + .withWorkflowGroup(1, workflowUserB) + .withWorkflowGroup(2, workflowUserC) + .withWorkflowGroup(3, workflowUserB) + .build(); + + WorkspaceItem wsi = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withSubmitter(workflowUserA) + .withTitle("Test item full workflow") + .withIssueDate("2019-03-06") + .withSubject("ExtraEntry") + .build(); + + Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(collection); + + XmlWorkflowItem workflowItem = xmlWorkflowService.startWithoutNotify(context, wsi); + MockHttpServletRequest httpServletRequest = new MockHttpServletRequest(); + httpServletRequest.setParameter("submit_approve", "submit_approve"); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, REVIEW_ACTION); + + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, CLAIM_ACTION); + + assertDeletionOfEperson(workflowUserC, false); + addUserToWorkflowGroup(workflowUserD, collection, EDIT_ROLE); + assertDeletionOfEperson(workflowUserC, true); + + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, EDIT_STEP, EDIT_ACTION); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, FINAL_EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, FINAL_EDIT_STEP, + FINAL_EDIT_ACTION); + + assertTrue(workflowItem.getItem().isArchived()); + + } + + /** + * This test verifies that an EPerson cannot be removed if they are the only member of a Workflow Group that has + * tasks currently assigned to it. This test verifies this with a claimed task by the user to be deleted in the + * final workflow step. + * This test also verifies that after another user has been added to the workflow groups of the to be deleted user, + * the original user can be successfully deleted. + * Afterwards the task can be claimed in the final step by the newly added user and the workflow can be completed. 
+ * + * @throws Exception + */ + @Test + public void testDeleteUserAfterReplacingUser6() throws Exception { + /* + * This test has the following setup: + * - Step 1: user B + * - Step 2: user C + * - Step 3: user B + * + * This test will perform the following checks: + * - create a workspace item, and let it move to step 1 + * - Approve it by user B + * - verify that the item moved to step 2 + * - Approve it by user C + * - verify that the item moved to step 3 + * - claim it by user B, but don’t approve it + * - delete user B + * - verify the delete is refused + * - add user D to workflow step 1 + * - add user D to workflow step 3 + * - delete user B + * - verify the delete succeeds + * - Verify user D can now claim and approve it + * - verify that the item is archived + */ + context.turnOffAuthorisationSystem(); + + Community parent = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, parent) + .withWorkflowGroup(1, workflowUserB) + .withWorkflowGroup(2, workflowUserC) + .withWorkflowGroup(3, workflowUserB) + .build(); + + WorkspaceItem wsi = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withSubmitter(workflowUserA) + .withTitle("Test item full workflow") + .withIssueDate("2019-03-06") + .withSubject("ExtraEntry") + .build(); + + Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(collection); + + XmlWorkflowItem workflowItem = xmlWorkflowService.startWithoutNotify(context, wsi); + MockHttpServletRequest httpServletRequest = new MockHttpServletRequest(); + httpServletRequest.setParameter("submit_approve", "submit_approve"); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, REVIEW_ACTION); + + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, EDIT_ACTION); + + assertDeletionOfEperson(workflowUserB, false); + addUserToWorkflowGroup(workflowUserD, collection, REVIEW_ROLE); + addUserToWorkflowGroup(workflowUserD, collection, FINAL_EDIT_ROLE); + assertDeletionOfEperson(workflowUserB, true); + + + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, FINAL_EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, FINAL_EDIT_STEP, + FINAL_EDIT_ACTION); + + assertTrue(workflowItem.getItem().isArchived()); + + } + + /** + * This test verifies that an EPerson can be removed if there is another user is present in the Workflow Group. + * This test verifies this with a pool task in the final workflow step. + * This test also verifies that the other user can claim the task and complete the workflow process. 
+ * + * @throws Exception + */ + @Test + public void testDeleteUserWhenMultipleUser1() throws Exception { + /* + * This test has the following setup: + * - Step 1: user B and D + * - Step 2: user C and D + * - Step 3: user B and D + * + * This test will perform the following checks: + * - create a workspace item, and let it move to step 1 + * - approve it by user B + * - verify that the item moved to step 2 + * - Approve it by user C + * - delete user B + * - verify the delete succeeds + * - Approve it by user D + * - verify that the item is archived + */ + context.turnOffAuthorisationSystem(); + + Community parent = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, parent) + .withWorkflowGroup(1, workflowUserB, workflowUserD) + .withWorkflowGroup(2, workflowUserC, workflowUserD) + .withWorkflowGroup(3, workflowUserB, workflowUserD) + .build(); + + WorkspaceItem wsi = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withSubmitter(workflowUserA) + .withTitle("Test item full workflow") + .withIssueDate("2019-03-06") + .withSubject("ExtraEntry") + .build(); + + Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(collection); + + XmlWorkflowItem workflowItem = xmlWorkflowService.startWithoutNotify(context, wsi); + MockHttpServletRequest httpServletRequest = new MockHttpServletRequest(); + httpServletRequest.setParameter("submit_approve", "submit_approve"); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, REVIEW_ACTION); + + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, EDIT_ACTION); + + assertDeletionOfEperson(workflowUserB, true); + + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, FINAL_EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, FINAL_EDIT_STEP, + FINAL_EDIT_ACTION); + + assertTrue(workflowItem.getItem().isArchived()); + + } + + /** + * This test verifies that an EPerson can be removed if there is another user is present in the Workflow Group. + * This test verifies this with a pool task in the first workflow step. + * This test also verifies that the other user can claim the task and complete the workflow process the deleted + * user was part of. 
+ * + * @throws Exception + */ + @Test + public void testDeleteUserWhenMultipleUser2() throws Exception { + /* + * This test has the following setup: + * - Step 1: user B and D + * - Step 2: user C and D + * - Step 3: user B and D + * + * This test will perform the following checks: + * - create a workspace item, and let it move to step 1 + * - delete user B + * - verify the delete succeeds + * - Approve it by user D + * - verify that the item moved to step 2 + * - Approve it by user C + * - verify that the item moved to step 3 + * - Approve it by user D + * - verify that the item is archived + */ + context.turnOffAuthorisationSystem(); + + Community parent = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, parent) + .withWorkflowGroup(1, workflowUserB, workflowUserD) + .withWorkflowGroup(2, workflowUserC, workflowUserD) + .withWorkflowGroup(3, workflowUserB, workflowUserD) + .build(); + + WorkspaceItem wsi = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withSubmitter(workflowUserA) + .withTitle("Test item full workflow") + .withIssueDate("2019-03-06") + .withSubject("ExtraEntry") + .build(); + + Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(collection); + + XmlWorkflowItem workflowItem = xmlWorkflowService.startWithoutNotify(context, wsi); + MockHttpServletRequest httpServletRequest = new MockHttpServletRequest(); + httpServletRequest.setParameter("submit_approve", "submit_approve"); + + assertDeletionOfEperson(workflowUserB, true); + + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, REVIEW_STEP, REVIEW_ACTION); + + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, EDIT_ACTION); + + + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, FINAL_EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, FINAL_EDIT_STEP, + FINAL_EDIT_ACTION); + + assertTrue(workflowItem.getItem().isArchived()); + + } + + /** + * This test verifies that an EPerson can be removed if there is another user is present in the Workflow Group. + * This test verifies this with a pool task in the middle workflow step. + * This test also verifies that the other user can claim the task and complete the workflow process. 
+ * + * @throws Exception + */ + @Test + public void testDeleteUserWhenMultipleUser3() throws Exception { + /* + * This test has the following setup: + * - Step 1: user B and D + * - Step 2: user C and D + * - Step 3: user B and D + * + * This test will perform the following checks: + * - create a workspace item, and let it move to step 1 + * - delete user C + * - verify the delete succeeds + * - Approve it by user B + * - verify that the item moved to step 2 + * - Approve it by user D + * - verify that the item moved to step 3 + * - Approve it by user B + * - verify that the item is archived + */ + context.turnOffAuthorisationSystem(); + + Community parent = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, parent) + .withWorkflowGroup(1, workflowUserB, workflowUserD) + .withWorkflowGroup(2, workflowUserC, workflowUserD) + .withWorkflowGroup(3, workflowUserB, workflowUserD) + .build(); + + WorkspaceItem wsi = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withSubmitter(workflowUserA) + .withTitle("Test item full workflow") + .withIssueDate("2019-03-06") + .withSubject("ExtraEntry") + .build(); + + Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(collection); + + XmlWorkflowItem workflowItem = xmlWorkflowService.startWithoutNotify(context, wsi); + MockHttpServletRequest httpServletRequest = new MockHttpServletRequest(); + httpServletRequest.setParameter("submit_approve", "submit_approve"); + + assertDeletionOfEperson(workflowUserC, true); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, REVIEW_ACTION); + + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, EDIT_STEP, EDIT_ACTION); + + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, FINAL_EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, FINAL_EDIT_STEP, + FINAL_EDIT_ACTION); + + assertTrue(workflowItem.getItem().isArchived()); + + } + + /** + * This test verifies that an EPerson can be removed if there is another user is present in the Workflow Group. + * This test verifies this with a claimed task in the first workflow step. + * This test also verifies that the claimed task will return the first workflow's step task pool and that the other + * user can claim the task and progress it. + * This test then verifies that the workflow can be completed and the item will be archived. 
+ * + * @throws Exception + */ + @Test + public void testDeleteUserWhenMultipleUser4() throws Exception { + /* + * This test has the following setup: + * - Step 1: user B and D + * - Step 2: user C and D + * - Step 3: user B and D + * + * This test will perform the following checks: + * - create a workspace item, and let it move to step 1 + * - claim it by user B, but don’t approve it + * - delete user B + * - verify the delete succeeds + * - Verify user D can now claim and approve it + * - verify that the item moved to step 2 + * - Approve it by user C + * - verify that the item moved to step 3 + * - Approve it by user D + * - verify that the item is archived + */ + context.turnOffAuthorisationSystem(); + + Community parent = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, parent) + .withWorkflowGroup(1, workflowUserB, workflowUserD) + .withWorkflowGroup(2, workflowUserC, workflowUserD) + .withWorkflowGroup(3, workflowUserB, workflowUserD) + .build(); + + WorkspaceItem wsi = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withSubmitter(workflowUserA) + .withTitle("Test item full workflow") + .withIssueDate("2019-03-06") + .withSubject("ExtraEntry") + .build(); + + Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(collection); + + XmlWorkflowItem workflowItem = xmlWorkflowService.startWithoutNotify(context, wsi); + MockHttpServletRequest httpServletRequest = new MockHttpServletRequest(); + httpServletRequest.setParameter("submit_approve", "submit_approve"); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION); + + assertDeletionOfEperson(workflowUserB, true); + + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, REVIEW_STEP, REVIEW_ACTION); + + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, EDIT_ACTION); + + + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, FINAL_EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, FINAL_EDIT_STEP, + FINAL_EDIT_ACTION); + + assertTrue(workflowItem.getItem().isArchived()); + + } + + /** + * This test verifies that an EPerson can be removed if there is another user is present in the Workflow Group. + * This test verifies this with a claimed task in the middle workflow step. + * This test also verifies that the claimed task will return the middle workflow's step task pool and that the other + * user can claim the task and progress it. + * This test then verifies that the workflow can be completed and the item will be archived. 
+     *
+     * @throws Exception
+     */
+    @Test
+    public void testDeleteUserWhenMultipleUser5() throws Exception {
+        /*
+         * This test has the following setup:
+         * - Step 1: user B and D
+         * - Step 2: user C and D
+         * - Step 3: user B and D
+         *
+         * This test will perform the following checks:
+         * - create a workspace item, and let it move to step 1
+         * - Approve it by user B
+         * - verify that the item moved to step 2
+         * - claim it by user C, but don’t approve it
+         * - delete user C
+         * - verify the delete succeeds
+         * - Verify user D can now claim and approve it
+         * - verify that the item moved to step 3
+         * - Approve it by user B
+         * - verify that the item is archived
+         */
+        context.turnOffAuthorisationSystem();
+
+        Community parent = CommunityBuilder.createCommunity(context).build();
+        Collection collection = CollectionBuilder.createCollection(context, parent)
+                                                 .withWorkflowGroup(1, workflowUserB, workflowUserD)
+                                                 .withWorkflowGroup(2, workflowUserC, workflowUserD)
+                                                 .withWorkflowGroup(3, workflowUserB, workflowUserD)
+                                                 .build();
+
+        WorkspaceItem wsi = WorkspaceItemBuilder.createWorkspaceItem(context, collection)
+                                                .withSubmitter(workflowUserA)
+                                                .withTitle("Test item full workflow")
+                                                .withIssueDate("2019-03-06")
+                                                .withSubject("ExtraEntry")
+                                                .build();
+
+        Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(collection);
+
+        XmlWorkflowItem workflowItem = xmlWorkflowService.startWithoutNotify(context, wsi);
+        MockHttpServletRequest httpServletRequest = new MockHttpServletRequest();
+        httpServletRequest.setParameter("submit_approve", "submit_approve");
+
+        executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION);
+        executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, REVIEW_ACTION);
+
+        executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, CLAIM_ACTION);
+        assertDeletionOfEperson(workflowUserC, true);
+
+        executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, EDIT_STEP, CLAIM_ACTION);
+        executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, EDIT_STEP, EDIT_ACTION);
+
+        executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, FINAL_EDIT_STEP, CLAIM_ACTION);
+        executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, FINAL_EDIT_STEP,
+                              FINAL_EDIT_ACTION);
+
+        assertTrue(workflowItem.getItem().isArchived());
+
+    }
+
+    /**
+     * This test verifies that an EPerson can be removed if another user is present in the Workflow Group.
+     * This test verifies this with a claimed task in the final workflow step.
+     * This test also verifies that the claimed task will return to the final workflow step's task pool and that the
+     * other user can claim the task and complete the workflow.
+     * This test then verifies that the item will be archived.
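+     * <p>
+     * As in the other tests, each step is advanced by claiming the task and then executing the step's action via the
+     * {@code executeWorkflowAction} helper, for example:
+     * <pre>{@code
+     * executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, FINAL_EDIT_STEP, CLAIM_ACTION);
+     * executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, FINAL_EDIT_STEP, FINAL_EDIT_ACTION);
+     * }</pre>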
+ * + * @throws Exception + */ + @Test + public void testDeleteUserWhenMultipleUser6() throws Exception { + /* + * This test has the following setup: + * - Step 1: user B and D + * - Step 2: user C and D + * - Step 3: user B and D + * + * This test will perform the following checks: + * - create a workspace item, and let it move to step 1 + * - Approve it by user B + * - verify that the item moved to step 2 + * - Approve it by user C + * - verify that the item moved to step 3 + * - claim it by user B, but don’t approve it + * - delete user B + * - verify the delete succeeds + * - Verify user D can now claim and approve it + * - verify that the item is archived + */ + context.turnOffAuthorisationSystem(); + + Community parent = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, parent) + .withWorkflowGroup(1, workflowUserB, workflowUserD) + .withWorkflowGroup(2, workflowUserC, workflowUserD) + .withWorkflowGroup(3, workflowUserB, workflowUserD) + .build(); + + WorkspaceItem wsi = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withSubmitter(workflowUserA) + .withTitle("Test item full workflow") + .withIssueDate("2019-03-06") + .withSubject("ExtraEntry") + .build(); + + Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(collection); + + XmlWorkflowItem workflowItem = xmlWorkflowService.startWithoutNotify(context, wsi); + MockHttpServletRequest httpServletRequest = new MockHttpServletRequest(); + httpServletRequest.setParameter("submit_approve", "submit_approve"); + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, REVIEW_STEP, REVIEW_ACTION); + + + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserC, workflow, workflowItem, EDIT_STEP, EDIT_ACTION); + + + executeWorkflowAction(httpServletRequest, workflowUserB, workflow, workflowItem, FINAL_EDIT_STEP, CLAIM_ACTION); + assertDeletionOfEperson(workflowUserB, true); + + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, FINAL_EDIT_STEP, CLAIM_ACTION); + executeWorkflowAction(httpServletRequest, workflowUserD, workflow, workflowItem, FINAL_EDIT_STEP, + FINAL_EDIT_ACTION); + + assertTrue(workflowItem.getItem().isArchived()); + + } + + + private void addUserToWorkflowGroup(EPerson ePerson, Collection collection, String roleName) throws SQLException { + List roles = collectionRoleService.findByCollection(context, collection); + for (CollectionRole role : roles) { + if (StringUtils.equals(role.getRoleId(), roleName)) { + Group group = role.getGroup(); + groupService.addMember(context, group, ePerson); + } + } + + } + + private void executeWorkflowAction(HttpServletRequest httpServletRequest, EPerson user, + Workflow workflow, XmlWorkflowItem workflowItem, String stepId, String actionId) + throws Exception { + context.setCurrentUser(user); + xmlWorkflowService.doState(context, user, httpServletRequest, workflowItem.getID(), workflow, + workflow.getStep(stepId).getActionConfig(actionId)); + context.setCurrentUser(null); + } + + private void assertRemovalOfEpersonFromWorkflowGroup(EPerson ePerson, Collection collection, String roleName, + boolean shouldSucceed) { + boolean deleteSuccess = false; + boolean deleteError = false; + + try { + List 
roles = collectionRoleService.findByCollection(context, collection); + for (CollectionRole role : roles) { + if (StringUtils.equals(role.getRoleId(), roleName)) { + Group group = role.getGroup(); + groupService.removeMember(context, group, ePerson); + deleteSuccess = true; + } + } + } catch (Exception ex) { + if (ex instanceof IllegalStateException) { + deleteSuccess = false; + deleteError = true; + } else { + deleteSuccess = false; + log.error("Caught an Exception while deleting an EPerson. " + ex.getClass().getName() + ": ", ex); + fail("Caught an Exception while deleting an EPerson. " + ex.getClass().getName() + + ": " + ex.getMessage()); + } + } + if (shouldSucceed) { + assertTrue(deleteSuccess); + assertFalse(deleteError); + } else { + assertTrue(deleteError); + assertFalse(deleteSuccess); + } + } + + private void assertDeletionOfEperson(EPerson ePerson, boolean shouldSucceed) throws SQLException { + boolean deleteSuccess; + boolean deleteError = false; + try { + ePersonService.delete(context, ePerson); + deleteSuccess = true; + } catch (Exception ex) { + if (ex instanceof IllegalStateException) { + deleteSuccess = false; + deleteError = true; + } else { + deleteSuccess = false; + log.error("Caught an Exception while deleting an EPerson. " + ex.getClass().getName() + ": ", ex); + fail("Caught an Exception while deleting an EPerson. " + ex.getClass().getName() + + ": " + ex.getMessage()); + } + } + + EPerson ePersonCheck = ePersonService.find(context, ePerson.getID()); + if (shouldSucceed) { + assertTrue(deleteSuccess); + assertFalse(deleteError); + assertNull(ePersonCheck); + } else { + assertTrue(deleteError); + assertFalse(deleteSuccess); + assertNotNull(ePerson); + } + } +} diff --git a/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java b/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java index 8950bfa409..24bc00cce4 100644 --- a/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java +++ b/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java @@ -5,21 +5,43 @@ * * http://www.dspace.org/license/ */ - package org.dspace.eperson; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; import static org.junit.Assert.fail; +import java.io.IOException; import java.sql.SQLException; +import java.util.Iterator; +import java.util.List; +import javax.mail.MessagingException; import org.apache.commons.codec.DecoderException; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.AbstractUnitTest; import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.WorkspaceItem; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.InstallItemService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.WorkspaceItemService; import org.dspace.core.Constants; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.EPersonService; +import org.dspace.eperson.service.GroupService; +import org.dspace.workflow.WorkflowException; +import org.dspace.workflow.WorkflowItem; +import org.dspace.workflow.WorkflowItemService; +import org.dspace.workflow.WorkflowService; +import 
org.dspace.workflow.factory.WorkflowServiceFactory; import org.junit.Before; import org.junit.Test; @@ -27,9 +49,29 @@ import org.junit.Test; * @author mwood */ public class EPersonTest extends AbstractUnitTest { - protected EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(EPersonTest.class); + protected EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); + protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); + protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); + protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + protected InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService(); + protected WorkflowItemService workflowItemService = WorkflowServiceFactory.getInstance().getWorkflowItemService(); + protected WorkflowService workflowService = WorkflowServiceFactory.getInstance().getWorkflowService(); + protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance() + .getWorkspaceItemService(); + + private Community community = null; + private Collection collection = null; + private Item item = null; + + private static final String EMAIL = "test@example.com"; + private static final String FIRSTNAME = "Kevin"; + private static final String LASTNAME = "Van de Velde"; + private static final String NETID = "1985"; + private static final String PASSWORD = "test"; + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(EPersonTest.class); public EPersonTest() { } @@ -38,8 +80,8 @@ public class EPersonTest extends AbstractUnitTest { * This method will be run before every test as per @Before. It will * initialize resources required for the tests. 
* - * Other methods can be annotated with @Before here or in subclasses - * but no execution order is guaranteed + * Other methods can be annotated with @Before here or in subclasses but no + * execution order is guaranteed */ @Before @Override @@ -49,12 +91,14 @@ public class EPersonTest extends AbstractUnitTest { context.turnOffAuthorisationSystem(); try { EPerson eperson = ePersonService.create(context); - eperson.setEmail("kevin@dspace.org"); - eperson.setFirstName(context, "Kevin"); - eperson.setLastName(context, "Van de Velde"); - eperson.setNetid("1985"); - eperson.setPassword("test"); + eperson.setEmail(EMAIL); + eperson.setFirstName(context, FIRSTNAME); + eperson.setLastName(context, LASTNAME); + eperson.setNetid(NETID); + eperson.setPassword(PASSWORD); ePersonService.update(context, eperson); + this.community = communityService.create(null, context); + this.collection = collectionService.create(context, this.community); } catch (SQLException | AuthorizeException ex) { log.error("Error in init", ex); fail("Error in init: " + ex.getMessage()); @@ -67,18 +111,76 @@ public class EPersonTest extends AbstractUnitTest { public void destroy() { context.turnOffAuthorisationSystem(); try { - EPerson testPerson = ePersonService.findByEmail(context, "kevin@dspace.org"); + EPerson testPerson = ePersonService.findByEmail(context, EMAIL); if (testPerson != null) { ePersonService.delete(context, testPerson); } - } catch (Exception ex) { + } catch (IOException | SQLException | AuthorizeException ex) { log.error("Error in destroy", ex); fail("Error in destroy: " + ex.getMessage()); } + if (item != null) { + try { + item = itemService.find(context, item.getID()); + itemService.delete(context, item); + } catch (SQLException | AuthorizeException | IOException ex) { + log.error("Error in destroy", ex); + fail("Error in destroy: " + ex.getMessage()); + } + } + if (this.collection != null) { + try { + this.collection = collectionService.find(context, this.collection.getID()); + collectionService.delete(context, this.collection); + } catch (SQLException | AuthorizeException | IOException ex) { + log.error("Error in destroy", ex); + fail("Error in destroy: " + ex.getMessage()); + } + } + if (this.community != null) { + try { + this.community = communityService.find(context, this.community.getID()); + communityService.delete(context, this.community); + } catch (SQLException | AuthorizeException | IOException ex) { + log.error("Error in destroy", ex); + fail("Error in destroy: " + ex.getMessage()); + } + } + context.restoreAuthSystemState(); + item = null; + this.collection = null; + this.community = null; super.destroy(); } + @Test + public void testPreferences() throws Exception { + + String cookies = + "{" + + "\"token_item\":true," + + "\"impersonation\":true," + + "\"redirect\":true," + + "\"language\":true," + + "\"klaro\":true," + + "\"google-analytics\":false" + + "}"; + + ePersonService.addMetadata(context, eperson, "dspace", "agreements", "cookies", null, cookies); + ePersonService.addMetadata(context, eperson, "dspace", "agreements", "end-user", null, "true"); + ePersonService.update(context, eperson); + + assertEquals( + cookies, + ePersonService.getMetadataFirstValue(eperson, "dspace", "agreements", "cookies", null) + ); + assertEquals( + "true", + ePersonService.getMetadataFirstValue(eperson, "dspace", "agreements", "end-user", null) + ); + } + /** * Test of equals method, of class EPerson. 
*/ @@ -684,36 +786,25 @@ public class EPersonTest extends AbstractUnitTest { /** * Test of checkPassword method, of class EPerson. + * + * @throws SQLException + * @throws DecoderException */ @Test public void testCheckPassword() - throws SQLException, DecoderException { - EPerson eperson = ePersonService.findByEmail(context, "kevin@dspace.org"); - ePersonService.checkPassword(context, eperson, "test"); + throws SQLException, DecoderException { + EPerson eperson = ePersonService.findByEmail(context, EMAIL); + ePersonService.checkPassword(context, eperson, PASSWORD); } - /** - * Test of update method, of class EPerson. - */ -/* - @Test - public void testUpdate() - throws Exception - { - System.out.println("update"); - EPerson instance = null; - instance.update(); - // TODO review the generated test code and remove the default call to fail. - fail("The test case is a prototype."); - } -*/ - /** * Test of getType method, of class EPerson. + * + * @throws SQLException */ @Test public void testGetType() - throws SQLException { + throws SQLException { System.out.println("getType"); int expResult = Constants.EPERSON; int result = eperson.getType(); @@ -721,37 +812,270 @@ public class EPersonTest extends AbstractUnitTest { } /** - * Test of getDeleteConstraints method, of class EPerson. + * Simple test if deletion of an EPerson throws any exceptions. + * + * @throws SQLException + * @throws AuthorizeException */ -/* @Test - public void testGetDeleteConstraints() - throws Exception - { - System.out.println("getDeleteConstraints"); - EPerson instance = null; - List expResult = null; - List result = instance.getDeleteConstraints(); - assertEquals(expResult, result); - // TODO review the generated test code and remove the default call to fail. - fail("The test case is a prototype."); + public void testDeleteEPerson() throws SQLException, AuthorizeException { + EPerson deleteEperson = ePersonService.findByEmail(context, EMAIL); + context.turnOffAuthorisationSystem(); + + try { + ePersonService.delete(context, deleteEperson); + } catch (AuthorizeException | IOException ex) { + log.error("Cannot delete EPersion, caught " + ex.getClass().getName() + ":", ex); + fail("Caught an Exception while deleting an EPerson. " + ex.getClass().getName() + + ": " + ex.getMessage()); + } + context.restoreAuthSystemState(); + context.commit(); + EPerson findDeletedEperson = ePersonService.findByEmail(context, EMAIL); + assertNull("EPerson has not been deleted correctly!", findDeletedEperson); } -*/ /** - * Test of getName method, of class EPerson. + * Test that an EPerson has a delete constraint if it submitted an Item. + * + * @throws SQLException */ -/* @Test - public void testGetName() - { - System.out.println("getName"); - EPerson instance = null; - String expResult = ""; - String result = instance.getName(); - assertEquals(expResult, result); - // TODO review the generated test code and remove the default call to fail. - fail("The test case is a prototype."); + public void testDeletionConstraintOfSubmitter() + throws SQLException { + EPerson ep = ePersonService.findByEmail(context, EMAIL); + try { + item = prepareItem(ep); + } catch (SQLException | AuthorizeException | IOException ex) { + log.error("Caught an Exception while initializing an Item. " + ex.getClass().getName() + ": ", ex); + fail("Caught an Exception while initializing an Item. 
" + ex.getClass().getName() + + ": " + ex.getMessage()); + } + + context.turnOffAuthorisationSystem(); + + List tableList = ePersonService.getDeleteConstraints(context, ep); + Iterator iterator = tableList.iterator(); + while (iterator.hasNext()) { + String tableName = iterator.next(); + if (StringUtils.equalsIgnoreCase(tableName, "item")) { + return; + } + } + // if we did not get and EPersonDeletionException or it did not contain the item table, we should fail + // because it was not recognized that the EPerson is used as submitter. + fail("It was not recognized that a EPerson is referenced in the item table."); + } + + /** + * Test that the submitter is set to null if the specified EPerson was + * deleted using cascading. + * + * @throws SQLException + * @throws AuthorizeException + */ + @Test + public void testDeletionOfSubmitterWithAnItem() + throws SQLException, AuthorizeException { + EPerson ep = ePersonService.findByEmail(context, EMAIL); + try { + item = prepareItem(ep); + } catch (SQLException | AuthorizeException | IOException ex) { + log.error("Caught an Exception while initializing an Item. " + ex.getClass().getName() + ": ", ex); + fail("Caught an Exception while initializing an Item. " + ex.getClass().getName() + + ": " + ex.getMessage()); + } + assertNotNull(item); + context.turnOffAuthorisationSystem(); + try { + ePersonService.delete(context, ep); + } catch (SQLException | IOException | AuthorizeException ex) { + if (ex.getCause() instanceof EPersonDeletionException) { + fail("Caught an EPersonDeletionException while trying to cascading delete an EPerson: " + + ex.getMessage()); + } else { + log.error("Caught an Exception while deleting an EPerson. " + ex.getClass().getName() + ": ", ex); + fail("Caught an Exception while deleting an EPerson. " + ex.getClass().getName() + + ": " + ex.getMessage()); + } + } + item = itemService.find(context, item.getID()); + assertNotNull("Could not load item after cascading deletion of the submitter.", item); + assertNull("Cascading deletion of an EPerson did not set the submitter of an submitted item null.", + item.getSubmitter()); + } + + /** + * Test that an unsubmitted workspace items get deleted when an EPerson gets + * deleted. + * + * @throws SQLException + * @throws IOException + * @throws AuthorizeException + */ + @Test + public void testCascadingDeletionOfUnsubmittedWorkspaceItem() + throws SQLException, AuthorizeException, IOException { + EPerson ep = ePersonService.findByEmail(context, EMAIL); + + context.turnOffAuthorisationSystem(); + WorkspaceItem wsi = prepareWorkspaceItem(ep); + Item item = wsi.getItem(); + itemService.addMetadata(context, item, "dc", "title", null, "en", "Testdocument 1"); + itemService.update(context, item); + context.restoreAuthSystemState(); + context.commit(); + context.turnOffAuthorisationSystem(); + + try { + ePersonService.delete(context, ep); + } catch (SQLException | IOException | AuthorizeException ex) { + if (ex.getCause() instanceof EPersonDeletionException) { + fail("Caught an EPersonDeletionException while trying to cascading delete an EPerson: " + + ex.getMessage()); + } else { + log.error("Caught an Exception while deleting an EPerson. " + ex.getClass().getName() + + ": ", ex); + fail("Caught an Exception while deleting an EPerson. 
" + ex.getClass().getName() + + ": " + ex.getMessage()); + } + } + + context.restoreAuthSystemState(); + context.commit(); + + try { + WorkspaceItem restoredWsi = workspaceItemService.find(context, wsi.getID()); + Item restoredItem = itemService.find(context, item.getID()); + assertNull("An unsubmited WorkspaceItem wasn't deleted while cascading deleting the submitter.", + restoredWsi); + assertNull("An unsubmited Item wasn't deleted while cascading deleting the submitter.", restoredItem); + } catch (SQLException ex) { + log.error("SQLException while trying to load previously stored. " + ex); + } + } + + /** + * Test that submitted but not yet archived items do not get delete while + * cascading deletion of an EPerson. + * + * @throws SQLException + * @throws AuthorizeException + * @throws IOException + * @throws MessagingException + * @throws WorkflowException + */ + @Test + public void testCascadingDeleteSubmitterPreservesWorkflowItems() + throws SQLException, AuthorizeException, IOException, MessagingException, WorkflowException { + EPerson ep = ePersonService.findByEmail(context, EMAIL); + WorkspaceItem wsi = null; + + try { + wsi = prepareWorkspaceItem(ep); + } catch (SQLException | AuthorizeException | IOException ex) { + log.error("Caught an Exception while initializing an WorkspaceItem. " + ex.getClass().getName() + + ": ", ex); + fail("Caught an Exception while initializing an WorkspaceItem. " + ex.getClass().getName() + + ": " + ex.getMessage()); + } + assertNotNull(wsi); + context.turnOffAuthorisationSystem(); + + // for this test we need an workflow item that is not yet submitted. Currently the Workflow advance + // automatically if nobody is defined to perform a step (see comments of DS-1941). + // We need to configure a collection to have a workflow step and set a person to perform this step. Then we can + // create an item, start the workflow and delete the item's submitter. + Group wfGroup = collectionService.createWorkflowGroup(context, wsi.getCollection(), 1); + collectionService.update(context, wsi.getCollection()); + EPerson groupMember = ePersonService.create(context); + groupMember.setEmail("testCascadingDeleteSubmitterPreservesWorkflowItems2@example.org"); + ePersonService.update(context, groupMember); + wfGroup.addMember(groupMember); + groupService.update(context, wfGroup); + + // DSpace currently contains two workflow systems. The newer XMLWorfklow needs additional tables that are not + // part of the test database yet. While it is expected that it becomes the default workflow system (DS-2059) + // one day, this won't happen before it its backported to JSPUI (DS-2121). + // TODO: add tests using the configurable workflowsystem + int wfiID = workflowService.startWithoutNotify(context, wsi).getID(); + context.restoreAuthSystemState(); + context.commit(); + context.turnOffAuthorisationSystem(); + + // check that the workflow item exists. + assertNotNull("Cannot find currently created WorkflowItem!", workflowItemService.find(context, wfiID)); + + // delete the submitter + try { + ePersonService.delete(context, ep); + } catch (SQLException | IOException | AuthorizeException ex) { + if (ex.getCause() instanceof EPersonDeletionException) { + fail("Caught an EPersonDeletionException while trying to cascading delete an EPerson: " + + ex.getMessage()); + } else { + log.error("Caught an Exception while deleting an EPerson. " + ex.getClass().getName() + + ": ", ex); + fail("Caught an Exception while deleting an EPerson. 
" + ex.getClass().getName() + + ": " + ex.getMessage()); + } + } + + context.restoreAuthSystemState(); + context.commit(); + context.turnOffAuthorisationSystem(); + + // check whether the workflow item still exists. + WorkflowItem wfi = workflowItemService.find(context, wfiID); + assertNotNull("Could not load WorkflowItem after cascading deletion of the submitter.", wfi); + assertNull("Cascading deletion of an EPerson did not set the submitter of an submitted WorkflowItem null.", + wfi.getSubmitter()); + } + + /** + * Creates an item, sets the specified submitter. + * + * This method is just an shortcut, so we must not use all the code again + * and again. + * + * @param submitter + * @return the created item. + * @throws SQLException + * @throws AuthorizeException + * @throws IOException + */ + private Item prepareItem(EPerson submitter) + throws SQLException, AuthorizeException, IOException { + context.turnOffAuthorisationSystem(); + WorkspaceItem wsi = prepareWorkspaceItem(submitter); + item = installItemService.installItem(context, wsi); + //we need to commit the changes so we don't block the table for testing + context.restoreAuthSystemState(); + return item; + } + + /** + * Creates a WorkspaceItem and sets the specified submitter. + * + * This method is just an shortcut, so we must not use all the code again + * and again. + * + * @param submitter + * @return the created WorkspaceItem. + * @throws SQLException + * @throws AuthorizeException + * @throws IOException + */ + private WorkspaceItem prepareWorkspaceItem(EPerson submitter) + throws SQLException, AuthorizeException, IOException { + context.turnOffAuthorisationSystem(); + // create a community, a collection and a WorkspaceItem + + WorkspaceItem wsi = workspaceItemService.create(context, this.collection, false); + // set the submitter + wsi.getItem().setSubmitter(submitter); + workspaceItemService.update(context, wsi); + context.restoreAuthSystemState(); + return wsi; } -*/ } diff --git a/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java b/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java index 744c1d666f..7fc3563bae 100644 --- a/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java +++ b/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java @@ -171,7 +171,6 @@ public class GroupTest extends AbstractUnitTest { public void findAll() throws SQLException { List groups = groupService.findAll(context, null); assertThat("findAll 1", groups, notNullValue()); - System.out.println("TEST GROUP OUTPUT " + groups); assertTrue("findAll 2", 0 < groups.size()); } diff --git a/dspace-api/src/test/java/org/dspace/license/MockCCLicenseConnectorServiceImpl.java b/dspace-api/src/test/java/org/dspace/license/MockCCLicenseConnectorServiceImpl.java new file mode 100644 index 0000000000..bc687a43f5 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/license/MockCCLicenseConnectorServiceImpl.java @@ -0,0 +1,130 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.license; + +import java.io.IOException; +import java.io.InputStream; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + +import org.apache.commons.lang3.StringUtils; +import org.jdom.Document; +import org.jdom.JDOMException; + +/** + * Mock implementation for the Creative commons license connector service. 
+ * This class will return a structure of CC Licenses similar to the CC License API but without having to contact it. + */ +public class MockCCLicenseConnectorServiceImpl extends CCLicenseConnectorServiceImpl { + + /** + * Retrieves mock CC Licenses for the provided language + * @param language - the language + * @return a map of mocked licenses with the id and the license + */ + @Override + public Map<String, CCLicense> retrieveLicenses(String language) { + Map<String, CCLicense> ccLicenses = new HashMap<>(); + CCLicense mockLicense1 = createMockLicense(1, new int[]{3, 2, 3}); + CCLicense mockLicense2 = createMockLicense(2, new int[]{2}); + CCLicense mockLicense3 = createMockLicense(3, new int[]{}); + + ccLicenses.put(mockLicense1.getLicenseId(), mockLicense1); + ccLicenses.put(mockLicense2.getLicenseId(), mockLicense2); + ccLicenses.put(mockLicense3.getLicenseId(), mockLicense3); + + return ccLicenses; + } + + private CCLicense createMockLicense(int count, int[] amountOfFieldsAndEnums) { + String licenseId = "license" + count; + String licenseName = "License " + count + " - Name"; + List<CCLicenseField> mockLicenseFields = createMockLicenseFields(count, amountOfFieldsAndEnums); + return new CCLicense(licenseId, licenseName, mockLicenseFields); + } + + private List<CCLicenseField> createMockLicenseFields(int count, int[] amountOfFieldsAndEnums) { + List<CCLicenseField> ccLicenseFields = new LinkedList<>(); + for (int index = 0; index < amountOfFieldsAndEnums.length; index++) { + String licenseFieldId = "license" + count + "-field" + index; + String licenseFieldLabel = "License " + count + " - Field " + index + " - Label"; + String licenseFieldDescription = "License " + count + " - Field " + index + " - Description"; + List<CCLicenseFieldEnum> mockLicenseFields = createMockLicenseFields(count, + index, + amountOfFieldsAndEnums[index]); + ccLicenseFields.add(new CCLicenseField(licenseFieldId, + licenseFieldLabel, + licenseFieldDescription, + mockLicenseFields)); + + } + + return ccLicenseFields; + } + + private List<CCLicenseFieldEnum> createMockLicenseFields(int count, int index, int amountOfEnums) { + List<CCLicenseFieldEnum> ccLicenseFieldEnumList = new LinkedList<>(); + for (int i = 0; i < amountOfEnums; i++) { + String enumId = "license" + count + "-field" + index + "-enum" + i; + String enumLabel = "License " + count + " - Field " + index + " - Enum " + i + " - Label"; + String enumDescription = "License " + count + " - Field " + index + " - Enum " + i + " - " + + "Description"; + ccLicenseFieldEnumList.add(new CCLicenseFieldEnum(enumId, enumLabel, enumDescription)); + } + return ccLicenseFieldEnumList; + + } + + /** + * Retrieve a mock CC License URI + * + * @param licenseId - the ID of the license + * @param language - the language for which to retrieve the full answerMap + * @param answerMap - the answers to the different field questions + * @return the CC License URI + */ + @Override + public String retrieveRightsByQuestion(final String licenseId, + final String language, + final Map<String, String> answerMap) { + + return "mock-license-uri"; + } + + /** + * Retrieve a mock license RDF document. 
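+ * The mocked document is parsed from the cc-license-rdf.xml resource on the test classpath.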
+ * When the uri contains "invalid", null will be returned to simulate that no document was found for the provided + * URI + * + * @param licenseURI - The license URI for which to retrieve the license RDF document + * @return a mock license RDF document or null when the URI contains invalid + * @throws IOException + */ + @Override + public Document retrieveLicenseRDFDoc(String licenseURI) throws IOException { + if (!StringUtils.contains(licenseURI, "invalid")) { + InputStream cclicense = null; + try { + cclicense = getClass().getResourceAsStream("cc-license-rdf.xml"); + + Document doc = parser.build(cclicense); + return doc; + } catch (JDOMException e) { + throw new RuntimeException(e); + } finally { + if (cclicense != null) { + cclicense.close(); + } + } + } + return null; + } + +} diff --git a/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java b/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java new file mode 100644 index 0000000000..1197370e32 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java @@ -0,0 +1,68 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.scripts; + +import java.io.InputStream; +import java.sql.SQLException; + +import org.apache.commons.cli.Options; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.core.Context; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.dspace.scripts.impl.MockDSpaceRunnableScript; +import org.springframework.beans.factory.annotation.Autowired; + +public class MockDSpaceRunnableScriptConfiguration extends ScriptConfiguration { + + + @Autowired + private AuthorizeService authorizeService; + + private Class dspaceRunnableClass; + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + /** + * Generic setter for the dspaceRunnableClass + * @param dspaceRunnableClass The dspaceRunnableClass to be set on this MetadataExportScriptConfiguration + */ + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public boolean isAllowedToExecute(Context context) { + try { + return authorizeService.isAdmin(context); + } catch (SQLException e) { + throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + } + } + + @Override + public Options getOptions() { + if (options == null) { + Options options = new Options(); + + options.addOption("r", "remove", true, "description r"); + options.getOption("r").setType(String.class); + options.addOption("i", "index", false, "description i"); + options.getOption("i").setType(boolean.class); + options.getOption("i").setRequired(true); + options.addOption("f", "file", true, "source file"); + options.getOption("f").setType(InputStream.class); + options.getOption("f").setRequired(false); + super.options = options; + } + return options; + } +} diff --git a/dspace-api/src/test/java/org/dspace/scripts/impl/MockDSpaceRunnableScript.java b/dspace-api/src/test/java/org/dspace/scripts/impl/MockDSpaceRunnableScript.java index 75f723d64b..960927e90a 100644 --- a/dspace-api/src/test/java/org/dspace/scripts/impl/MockDSpaceRunnableScript.java +++ 
b/dspace-api/src/test/java/org/dspace/scripts/impl/MockDSpaceRunnableScript.java @@ -7,19 +7,20 @@ */ package org.dspace.scripts.impl; -import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.dspace.scripts.DSpaceRunnable; +import org.dspace.scripts.MockDSpaceRunnableScriptConfiguration; +import org.dspace.utils.DSpace; -public class MockDSpaceRunnableScript extends DSpaceRunnable { - - private MockDSpaceRunnableScript() { - Options options = constructOptions(); - this.options = options; +public class MockDSpaceRunnableScript extends DSpaceRunnable { + @Override + public void internalRun() throws Exception { } @Override - public void internalRun() throws Exception { + public MockDSpaceRunnableScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager() + .getServiceByName("mock-script", MockDSpaceRunnableScriptConfiguration.class); } @Override @@ -28,15 +29,4 @@ public class MockDSpaceRunnableScript extends DSpaceRunnable { throw new ParseException("-i is a mandatory parameter"); } } - - private Options constructOptions() { - Options options = new Options(); - - options.addOption("r", "remove", true, "description r"); - options.getOption("r").setType(String.class); - options.addOption("i", "index", true, "description i"); - options.getOption("i").setType(boolean.class); - options.getOption("i").setRequired(true); - return options; - } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/solr/MockSolrServer.java b/dspace-api/src/test/java/org/dspace/solr/MockSolrServer.java similarity index 97% rename from dspace-server-webapp/src/test/java/org/dspace/solr/MockSolrServer.java rename to dspace-api/src/test/java/org/dspace/solr/MockSolrServer.java index 237f35e63f..6faf9a7d1b 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/solr/MockSolrServer.java +++ b/dspace-api/src/test/java/org/dspace/solr/MockSolrServer.java @@ -19,7 +19,7 @@ import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer; import org.apache.solr.core.CoreContainer; -import org.dspace.app.rest.test.AbstractDSpaceIntegrationTest; +import org.dspace.AbstractDSpaceIntegrationTest; /** * Factory of connections to an in-process embedded Solr service. 
@@ -110,7 +110,7 @@ public class MockSolrServer { server.deleteByQuery("*:*"); server.commit(); } catch (SolrServerException | IOException e) { - e.printStackTrace(System.err); + log.error("Failed to empty Solr index: {}", e.getMessage(), e); } loadedCores.put(coreName, server); diff --git a/dspace-api/src/test/java/org/dspace/statistics/MockSolrLoggerServiceImpl.java b/dspace-api/src/test/java/org/dspace/statistics/MockSolrLoggerServiceImpl.java index cca05a12cc..7cb20c23d1 100644 --- a/dspace-api/src/test/java/org/dspace/statistics/MockSolrLoggerServiceImpl.java +++ b/dspace-api/src/test/java/org/dspace/statistics/MockSolrLoggerServiceImpl.java @@ -16,6 +16,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.Map; import com.maxmind.geoip2.DatabaseReader; import com.maxmind.geoip2.model.CityResponse; @@ -27,27 +28,29 @@ import com.maxmind.geoip2.record.MaxMind; import com.maxmind.geoip2.record.Postal; import com.maxmind.geoip2.record.RepresentedCountry; import com.maxmind.geoip2.record.Traits; +import org.dspace.solr.MockSolrServer; +import org.springframework.beans.factory.DisposableBean; import org.springframework.beans.factory.InitializingBean; +import org.springframework.stereotype.Service; /** * Mock service that uses an embedded SOLR server for the statistics core. - *
    - * NOTE: this class is overridden by one of the same name - * defined in dspace-server-webapp and declared as a bean there. - * See {@code test/data/dspaceFolder/config/spring/api/solr-services.xml}. Some kind of classpath - * magic makes this work. */ +@Service public class MockSolrLoggerServiceImpl extends SolrLoggerServiceImpl - implements InitializingBean { + implements InitializingBean, DisposableBean { + + private MockSolrServer mockSolrServer; public MockSolrLoggerServiceImpl() { } @Override public void afterPropertiesSet() throws Exception { - //We don't use SOLR in the tests of this module - solr = null; + // Initialize our service with a Mock Solr statistics core + mockSolrServer = new MockSolrServer("statistics"); + solr = mockSolrServer.getSolrServer(); // Mock GeoIP's DatabaseReader DatabaseReader reader = mock(DatabaseReader.class); @@ -58,14 +61,18 @@ public class MockSolrLoggerServiceImpl } /** - * A mock/fake GeoIP CityResponse, which will be used for *all* test statistical requests + * A mock/fake GeoIP CityResponse, which will be used for *all* test + * statistical requests. + * * @return faked CityResponse */ private CityResponse mockCityResponse() { - List cityNames = new ArrayList(Collections.singleton("New York")); - City city = new City(cityNames, 1, 1, new HashMap()); + List cityLocales = new ArrayList(Collections.singleton("en")); + Map cityNames = new HashMap<>(); + cityNames.put("en", "New York"); + City city = new City(cityLocales, 1, 1, cityNames); - List countryNames = new ArrayList(Collections.singleton("United States")); + List countryNames = new ArrayList<>(Collections.singleton("United States")); Country country = new Country(countryNames, 1, 1, "US", new HashMap()); Location location = new Location(1, 1, 40.760498D, -73.9933D, 501, 1, "EST"); @@ -73,7 +80,17 @@ public class MockSolrLoggerServiceImpl Postal postal = new Postal("10036", 1); return new CityResponse(city, new Continent(), country, location, new MaxMind(), postal, - country, new RepresentedCountry(), new ArrayList<>(0), - new Traits()); + country, new RepresentedCountry(), new ArrayList<>(0), + new Traits()); + } + + /** Reset the core for the next test. See {@link MockSolrServer#reset()}. 
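+ * Call this between tests that write statistics records so one test's data does not leak into the next.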
*/ + public void reset() { + mockSolrServer.reset(); + } + + @Override + public void destroy() throws Exception { + mockSolrServer.destroy(); } } diff --git a/dspace-api/src/test/java/org/dspace/statistics/export/FailedOpenURLTrackerServiceImplTest.java b/dspace-api/src/test/java/org/dspace/statistics/export/FailedOpenURLTrackerServiceImplTest.java new file mode 100644 index 0000000000..25c1a9b02b --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/statistics/export/FailedOpenURLTrackerServiceImplTest.java @@ -0,0 +1,85 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export; + +import static org.junit.Assert.assertEquals; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.when; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; + +import org.dspace.core.Context; +import org.dspace.statistics.export.dao.OpenURLTrackerDAO; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.runners.MockitoJUnitRunner; + +/** + * Class to test the FailedOpenURLTrackerServiceImpl + */ +@RunWith(MockitoJUnitRunner.class) +public class FailedOpenURLTrackerServiceImplTest { + + @InjectMocks + private FailedOpenURLTrackerServiceImpl openURLTrackerLoggerService; + + @Mock + private Context context; + + @Mock + private OpenURLTracker openURLTracker; + + @Mock + private OpenURLTrackerDAO openURLTrackerDAO; + + /** + * Tests the remove method + * @throws SQLException + */ + @Test + public void testRemove() throws SQLException { + openURLTrackerLoggerService.remove(context, openURLTracker); + + Mockito.verify(openURLTrackerDAO, times(1)).delete(context, openURLTracker); + + } + + /** + * Tests the findAll method + * @throws SQLException + */ + @Test + public void testFindAll() throws SQLException { + List trackers = new ArrayList<>(); + + when(openURLTrackerDAO.findAll(context, OpenURLTracker.class)).thenReturn(trackers); + + assertEquals("TestFindAll 0", trackers, openURLTrackerLoggerService.findAll(context)); + } + + /** + * Tests the create method + * @throws SQLException + */ + @Test + public void testCreate() throws SQLException { + OpenURLTracker tracker = new OpenURLTracker(); + + when(openURLTrackerDAO.create(any(), any())).thenReturn(tracker); + + assertEquals("TestCreate 0", tracker, openURLTrackerLoggerService.create(context)); + } + + +} diff --git a/dspace-api/src/test/java/org/dspace/statistics/export/ITIrusExportUsageEventListener.java b/dspace-api/src/test/java/org/dspace/statistics/export/ITIrusExportUsageEventListener.java new file mode 100644 index 0000000000..75ee6e4008 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/statistics/export/ITIrusExportUsageEventListener.java @@ -0,0 +1,418 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.mockito.Matchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + 
+import java.io.File; +import java.io.FileInputStream; +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.regex.Pattern; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.codec.CharEncoding; +import org.apache.log4j.Logger; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EntityTypeBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.EntityType; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.BundleService; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.EntityTypeService; +import org.dspace.content.service.InstallItemService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.WorkspaceItemService; +import org.dspace.core.Context; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; +import org.dspace.eperson.service.GroupService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.statistics.export.factory.OpenURLTrackerLoggerServiceFactory; +import org.dspace.statistics.export.service.FailedOpenURLTrackerService; +import org.dspace.usage.UsageEvent; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +/** + * Test class for the IrusExportUsageEventListener + */ +//@RunWith(MockitoJUnitRunner.class) +public class ITIrusExportUsageEventListener extends AbstractIntegrationTestWithDatabase { + + private static Logger log = Logger.getLogger(ITIrusExportUsageEventListener.class); + + + protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); + protected ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); + protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + protected InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService(); + protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); + protected EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); + protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + protected BundleService bundleService = ContentServiceFactory.getInstance().getBundleService(); + protected BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); + protected EntityTypeService entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService(); + protected FailedOpenURLTrackerService failedOpenURLTrackerService = + OpenURLTrackerLoggerServiceFactory.getInstance().getOpenUrlTrackerLoggerService(); + + 
protected ArrayList testProcessedUrls = DSpaceServicesFactory.getInstance().getServiceManager() + .getServiceByName("testProcessedUrls", + ArrayList.class); + + private IrusExportUsageEventListener exportUsageEventListener = + DSpaceServicesFactory.getInstance() + .getServiceManager() + .getServicesByType(IrusExportUsageEventListener.class) + .get(0); + + private Item item; + private Item itemNotToBeProcessed; + private Bitstream bitstream; + private Bitstream bitstreamNotToBeProcessed; + private EntityType entityType; + private Community community; + private Collection collection; + + private String encodedUrl; + private String encodedUIUrl; + + + /** + * Initializes the test by setting up all objects needed to create a test item + */ + @Before() + public void setUp() throws Exception { + super.setUp(); + + configurationService.setProperty("irus.statistics.tracker.enabled", true); + configurationService.setProperty("irus.statistics.tracker.type-field", "dc.type"); + configurationService.setProperty("irus.statistics.tracker.type-value", "Excluded type"); + + + context.turnOffAuthorisationSystem(); + try { + + entityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); + community = CommunityBuilder.createCommunity(context).build(); + collection = CollectionBuilder.createCollection(context, community).build(); + item = ItemBuilder.createItem(context, collection) + .withRelationshipType(entityType.getLabel()) + .build(); + + File f = new File(testProps.get("test.bitstream").toString()); + bitstream = BitstreamBuilder.createBitstream(context, item, new FileInputStream(f)).build(); + + itemNotToBeProcessed = ItemBuilder.createItem(context, collection) + .withRelationshipType(entityType.getLabel()) + .withType("Excluded type") + .build(); + File itemNotToBeProcessedFile = new File(testProps.get("test.bitstream").toString()); + bitstreamNotToBeProcessed = BitstreamBuilder + .createBitstream(context, itemNotToBeProcessed, new FileInputStream(itemNotToBeProcessedFile)) + .build(); + + String dspaceUrl = configurationService.getProperty("dspace.server.url"); + encodedUrl = URLEncoder.encode(dspaceUrl, CharEncoding.UTF_8); + String dspaceUIUrl = configurationService.getProperty("dspace.ui.url"); + encodedUIUrl = URLEncoder.encode(dspaceUIUrl, CharEncoding.UTF_8); + + + } catch (Exception e) { + log.error(e.getMessage(), e); + } finally { + context.restoreAuthSystemState(); + } + } + + /** + * Clean up the created objects + * Empty the testProcessedUrls used to store succeeded urls + * Empty the database table where the failed urls are logged + */ + @After + public void destroy() throws Exception { + try { + context.turnOffAuthorisationSystem(); + + List all = failedOpenURLTrackerService.findAll(context); + for (OpenURLTracker tracker : all) { + failedOpenURLTrackerService.remove(context, tracker); + } + + // Clear the list of processedUrls + testProcessedUrls.clear(); + + } catch (Exception e) { + log.error(e.getMessage(), e); + } finally { + try { + context.complete(); + } catch (SQLException e) { + log.error(e); + } + } + super.destroy(); + } + + /** + * Test whether the usage event of an item meeting all conditions is processed and succeeds + */ + @Test + public void testReceiveEventOnItemThatShouldBeProcessed() throws UnsupportedEncodingException, SQLException { + HttpServletRequest request = mock(HttpServletRequest.class); + when(request.getRemoteAddr()).thenReturn("client-ip"); + when(request.getHeader(anyString())).thenReturn(null); + + UsageEvent usageEvent = 
mock(UsageEvent.class); + when(usageEvent.getObject()).thenReturn(item); + when(usageEvent.getRequest()).thenReturn(request); + when(usageEvent.getContext()).thenReturn(new Context()); + + exportUsageEventListener.receiveEvent(usageEvent); + + + List all = failedOpenURLTrackerService.findAll(context); + + + String regex = "https://irus.jisc.ac.uk/counter/test/\\?url_ver=Z39.88-2004&req_id=" + + URLEncoder.encode(request.getRemoteAddr(), "UTF-8") + "&req_dat=&rft" + + ".artnum=oai%3Alocalhost%3A" + URLEncoder.encode(item.getHandle(), "UTF-8") + "&rfr_dat=&rfr_id" + + "=localhost&url_tim=" + ".*" + "?&svc_dat=" + encodedUIUrl + "%2Fhandle%2F" + URLEncoder + .encode(item.getHandle(), "UTF-8") + "&rft_dat=Investigation"; + + boolean isMatch = matchesString(String.valueOf(testProcessedUrls.get(0)), regex); + + assertEquals(1, testProcessedUrls.size()); + assertTrue(isMatch); + assertEquals(0, all.size()); + + + } + + /** + * Test whether the usage event of an item meeting all conditions is processed but fails + */ + @Test + public void testReceiveEventOnItemThatShouldBeProcessedFailed() throws SQLException, UnsupportedEncodingException { + HttpServletRequest request = mock(HttpServletRequest.class); + when(request.getRemoteAddr()).thenReturn("client-ip-fail"); + when(request.getHeader(anyString())).thenReturn(null); + + UsageEvent usageEvent = mock(UsageEvent.class); + when(usageEvent.getObject()).thenReturn(item); + when(usageEvent.getRequest()).thenReturn(request); + when(usageEvent.getContext()).thenReturn(new Context()); + + exportUsageEventListener.receiveEvent(usageEvent); + + + List all = failedOpenURLTrackerService.findAll(context); + + String regex = "https://irus.jisc.ac.uk/counter/test/\\?url_ver=Z39.88-2004&req_id=" + + URLEncoder.encode(request.getRemoteAddr(), "UTF-8") + "&req_dat=&rft" + + ".artnum=oai%3Alocalhost%3A" + URLEncoder.encode(item.getHandle(), "UTF-8") + "&rfr_dat=&rfr_id" + + "=localhost&url_tim=" + ".*" + "?&svc_dat=" + encodedUIUrl + "%2Fhandle%2F" + URLEncoder + .encode(item.getHandle(), "UTF-8") + "&rft_dat=Investigation"; + + boolean isMatch = matchesString(all.get(0).getUrl(), regex); + + assertEquals(0, testProcessedUrls.size()); + + assertEquals(1, all.size()); + assertTrue(isMatch); + } + + /** + * Test whether the usage event of an item that does not meet all conditions is not processed + */ + @Test + public void testReceiveEventOnItemThatShouldNotBeProcessed() throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + + HttpServletRequest request = mock(HttpServletRequest.class); + + UsageEvent usageEvent = mock(UsageEvent.class); + when(usageEvent.getObject()).thenReturn(itemNotToBeProcessed); + when(usageEvent.getRequest()).thenReturn(request); + when(usageEvent.getContext()).thenReturn(new Context()); + + itemService.clearMetadata(context, item, "relationship", "type", null, Item.ANY); + itemService.addMetadata(context, item, "relationship", "type", null, null, "OrgUnit"); + itemService.update(context, item); + + context.restoreAuthSystemState(); + + // doCallRealMethod().when(IrusExportUsageEventListener).receiveEvent(usageEvent); + exportUsageEventListener.receiveEvent(usageEvent); + + List all = failedOpenURLTrackerService.findAll(context); + + + assertEquals(0, testProcessedUrls.size()); + assertEquals(0, all.size()); + } + + /** + * Test whether the usage event of a bitstream meeting all conditions is processed and succeeds + */ + @Test + public void testReceiveEventOnBitstreamThatShouldBeProcessed() throws SQLException, 
UnsupportedEncodingException { + HttpServletRequest request = mock(HttpServletRequest.class); + when(request.getRemoteAddr()).thenReturn("client-ip"); + when(request.getHeader(anyString())).thenReturn(null); + + UsageEvent usageEvent = mock(UsageEvent.class); + when(usageEvent.getObject()).thenReturn(bitstream); + when(usageEvent.getRequest()).thenReturn(request); + when(usageEvent.getContext()).thenReturn(new Context()); + + exportUsageEventListener.receiveEvent(usageEvent); + + String regex = "https://irus.jisc.ac.uk/counter/test/\\?url_ver=Z39.88-2004&req_id=" + + URLEncoder.encode(request.getRemoteAddr(), "UTF-8") + "&req_dat=&rft" + + ".artnum=oai%3Alocalhost%3A" + URLEncoder.encode(item.getHandle(), "UTF-8") + "&rfr_dat=&rfr_id" + + "=localhost&url_tim=" + ".*" + "?&svc_dat=" + encodedUrl + "%2Fapi%2Fcore%2Fbitstreams" + + "%2F" + bitstream.getID() + "%2Fcontent" + "&rft_dat=Request"; + + boolean isMatch = matchesString(String.valueOf(testProcessedUrls.get(0)), regex); + + assertEquals(1, testProcessedUrls.size()); + assertTrue(isMatch); + + List all = failedOpenURLTrackerService.findAll(context); + assertEquals(0, all.size()); + } + + /** + * Test whether the usage event of a bitstream meeting all conditions is processed but fails + */ + @Test + public void testReceiveEventOnBitstreamThatShouldBeProcessedFail() throws UnsupportedEncodingException, + SQLException { + HttpServletRequest request = mock(HttpServletRequest.class); + when(request.getRemoteAddr()).thenReturn("client-ip-fail"); + when(request.getHeader(anyString())).thenReturn(null); + + UsageEvent usageEvent = mock(UsageEvent.class); + when(usageEvent.getObject()).thenReturn(bitstream); + when(usageEvent.getRequest()).thenReturn(request); + when(usageEvent.getContext()).thenReturn(new Context()); + + exportUsageEventListener.receiveEvent(usageEvent); + + List all = failedOpenURLTrackerService.findAll(context); + + String regex = "https://irus.jisc.ac.uk/counter/test/\\?url_ver=Z39.88-2004&req_id=" + + URLEncoder.encode(request.getRemoteAddr(), "UTF-8") + "&req_dat=&rft" + + ".artnum=oai%3Alocalhost%3A" + URLEncoder.encode(item.getHandle(), "UTF-8") + "&rfr_dat=&rfr_id" + + "=localhost&url_tim=" + ".*" + "?&svc_dat=" + encodedUrl + "%2Fapi%2Fcore%2Fbitstreams" + + "%2F" + bitstream.getID() + "%2Fcontent" + "&rft_dat=Request"; + + + boolean isMatch = matchesString(all.get(0).getUrl(), regex); + + assertEquals(1, all.size()); + assertEquals(true, isMatch); + assertEquals(0, testProcessedUrls.size()); + + } + + /** + * Test whether the usage event of a bitstream that does not meet all conditions is not processed + */ + @Test + public void testReceiveEventOnBitstreamThatShouldNotBeProcessed() throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + HttpServletRequest request = mock(HttpServletRequest.class); + when(request.getRemoteAddr()).thenReturn("client-ip-fail"); + when(request.getHeader(anyString())).thenReturn(null); + + UsageEvent usageEvent = mock(UsageEvent.class); + when(usageEvent.getObject()).thenReturn(bitstreamNotToBeProcessed); + when(usageEvent.getRequest()).thenReturn(request); + when(usageEvent.getContext()).thenReturn(new Context()); + + itemService.clearMetadata(context, item, "relationship", "type", null, Item.ANY); + itemService.addMetadata(context, item, "relationship", "type", null, null, "OrgUnit"); + itemService.update(context, item); + + context.restoreAuthSystemState(); + + exportUsageEventListener.receiveEvent(usageEvent); + + List all = 
failedOpenURLTrackerService.findAll(context); + + + assertEquals(0, all.size()); + assertEquals(0, testProcessedUrls.size()); + + } + + /** + * Test that an object that is not an Item or Bitstream is not processed + */ + @Test + public void testReceiveEventOnNonRelevantObject() throws SQLException { + + HttpServletRequest request = mock(HttpServletRequest.class); + + UsageEvent usageEvent = mock(UsageEvent.class); + when(usageEvent.getObject()).thenReturn(community); + when(usageEvent.getContext()).thenReturn(new Context()); + + exportUsageEventListener.receiveEvent(usageEvent); + + List all = failedOpenURLTrackerService.findAll(context); + + + assertEquals(0, all.size()); + assertEquals(0, testProcessedUrls.size()); + + } + + /** + * Method to test if a string matches a regex + * + * @param string + * @param regex + * @return whether the regex matches the string + */ + private boolean matchesString(String string, String regex) { + + Pattern p = Pattern.compile(regex); + + if (p.matcher(string).matches()) { + return true; + } + return false; + } + + +} diff --git a/dspace-api/src/test/java/org/dspace/statistics/export/ITRetryFailedOpenUrlTracker.java b/dspace-api/src/test/java/org/dspace/statistics/export/ITRetryFailedOpenUrlTracker.java new file mode 100644 index 0000000000..a445a6540f --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/statistics/export/ITRetryFailedOpenUrlTracker.java @@ -0,0 +1,182 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export; + +import static org.junit.Assert.assertEquals; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.log4j.Logger; +import org.dspace.AbstractIntegrationTest; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.dspace.scripts.factory.ScriptServiceFactory; +import org.dspace.scripts.service.ScriptService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.statistics.export.factory.OpenURLTrackerLoggerServiceFactory; +import org.dspace.statistics.export.service.FailedOpenURLTrackerService; +import org.junit.After; +import org.junit.Test; + +/** + * Class to test the RetryFailedOpenUrlTracker + */ +public class ITRetryFailedOpenUrlTracker extends AbstractIntegrationTest { + + private static Logger log = Logger.getLogger(ITRetryFailedOpenUrlTracker.class); + + + protected FailedOpenURLTrackerService failedOpenURLTrackerService = + OpenURLTrackerLoggerServiceFactory.getInstance().getOpenUrlTrackerLoggerService(); + + protected ArrayList testProcessedUrls = DSpaceServicesFactory.getInstance().getServiceManager() + .getServiceByName("testProcessedUrls", + ArrayList.class); + + private ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService(); + + + /** + * Clean up the logged entries from the db after each test + */ + @After + @Override + public void destroy() { + try { + context.turnOffAuthorisationSystem(); + + List all = failedOpenURLTrackerService.findAll(context); + for (OpenURLTracker tracker : all) { + failedOpenURLTrackerService.remove(context, tracker); + } + + // Clear the list of processedUrls + testProcessedUrls.clear(); + + } catch (Exception e) { + log.error(e.getMessage(), e); + } 
finally { + try { + context.complete(); + } catch (SQLException e) { + log.error(e); + } + } + super.destroy(); + } + + /** + * Test the mode of the script that allows the user to add a failed url to the database + * + * @throws Exception + */ + @Test + public void testAddNewFailedUrl() throws Exception { + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptConfiguration retryOpenUrlTrackerConfig = scriptService.getScriptConfiguration("retry-tracker"); + DSpaceRunnable retryOpenUrlTracker = + scriptService.createDSpaceRunnableForScriptConfiguration(retryOpenUrlTrackerConfig); + String urlToAdd = "test-failed-url"; + String[] args = {"-a", urlToAdd}; + + retryOpenUrlTracker.initialize(args, testDSpaceRunnableHandler, eperson); + retryOpenUrlTracker.internalRun(); + + List all = failedOpenURLTrackerService.findAll(context); + + assertEquals(0, testProcessedUrls.size()); + assertEquals(1, all.size()); + assertEquals(urlToAdd, all.get(0).getUrl()); + } + + /** + * Test to check that all logged failed urls are reprocessed succesfully and removed from the db + * + * @throws Exception + */ + @Test + public void testReprocessAllUrls() throws Exception { + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptConfiguration retryOpenUrlTrackerConfig = scriptService.getScriptConfiguration("retry-tracker"); + DSpaceRunnable retryOpenUrlTracker = + scriptService.createDSpaceRunnableForScriptConfiguration(retryOpenUrlTrackerConfig); + String[] args = {"-r"}; + + OpenURLTracker tracker1 = failedOpenURLTrackerService.create(context); + tracker1.setUrl("test-url-1"); + OpenURLTracker tracker2 = failedOpenURLTrackerService.create(context); + tracker2.setUrl("test-url-2"); + OpenURLTracker tracker3 = failedOpenURLTrackerService.create(context); + tracker3.setUrl("test-url-3"); + + + retryOpenUrlTracker.initialize(args, testDSpaceRunnableHandler, eperson); + retryOpenUrlTracker.internalRun(); + + List all = failedOpenURLTrackerService.findAll(context); + + assertEquals(3, testProcessedUrls.size()); + assertEquals(true, testProcessedUrls.contains("test-url-1")); + assertEquals(true, testProcessedUrls.contains("test-url-2")); + assertEquals(true, testProcessedUrls.contains("test-url-3")); + + assertEquals(0, all.size()); + } + + /** + * Test to check that the successful retries are removed, but the failed retries remain in the db + * + * @throws Exception + */ + @Test + public void testReprocessPartOfUrls() throws Exception { + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptConfiguration retryOpenUrlTrackerConfig = scriptService.getScriptConfiguration("retry-tracker"); + DSpaceRunnable retryOpenUrlTracker = + scriptService.createDSpaceRunnableForScriptConfiguration(retryOpenUrlTrackerConfig); + String[] args = {"-r"}; + + OpenURLTracker tracker1 = failedOpenURLTrackerService.create(context); + tracker1.setUrl("test-url-1"); + OpenURLTracker tracker2 = failedOpenURLTrackerService.create(context); + tracker2.setUrl("test-url-2-fail"); + OpenURLTracker tracker3 = failedOpenURLTrackerService.create(context); + tracker3.setUrl("test-url-3-fail"); + OpenURLTracker tracker4 = failedOpenURLTrackerService.create(context); + tracker4.setUrl("test-url-4-fail"); + OpenURLTracker tracker5 = failedOpenURLTrackerService.create(context); + tracker5.setUrl("test-url-5"); + + + retryOpenUrlTracker.initialize(args, testDSpaceRunnableHandler, eperson); + retryOpenUrlTracker.internalRun(); + + 
List all = failedOpenURLTrackerService.findAll(context); + List storedTrackerUrls = new ArrayList<>(); + for (OpenURLTracker tracker : all) { + storedTrackerUrls.add(tracker.getUrl()); + } + + assertEquals(2, testProcessedUrls.size()); + assertEquals(true, testProcessedUrls.contains("test-url-1")); + assertEquals(true, testProcessedUrls.contains("test-url-5")); + + assertEquals(3, all.size()); + assertEquals(true, storedTrackerUrls.contains("test-url-2-fail")); + assertEquals(true, storedTrackerUrls.contains("test-url-3-fail")); + assertEquals(true, storedTrackerUrls.contains("test-url-4-fail")); + } + + +} diff --git a/dspace-api/src/test/java/org/dspace/statistics/export/processor/BitstreamEventProcessorTest.java b/dspace-api/src/test/java/org/dspace/statistics/export/processor/BitstreamEventProcessorTest.java new file mode 100644 index 0000000000..62556d1594 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/statistics/export/processor/BitstreamEventProcessorTest.java @@ -0,0 +1,87 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export.processor; + +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.assertThat; +import static org.mockito.Mockito.mock; + +import java.io.File; +import java.io.FileInputStream; +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.codec.CharEncoding; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.Before; +import org.junit.Test; + +/** + * Test class for the BitstreamEventProcessor + */ +public class BitstreamEventProcessorTest extends AbstractIntegrationTestWithDatabase { + + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + + + private String encodedUrl; + + + @Before + public void setUp() throws Exception { + super.setUp(); + configurationService.setProperty("irus.statistics.tracker.enabled", true); + + String dspaceUrl = configurationService.getProperty("dspace.server.url"); + try { + encodedUrl = URLEncoder.encode(dspaceUrl, CharEncoding.UTF_8); + } catch (UnsupportedEncodingException e) { + throw new AssertionError("Error occurred in setup()", e); + } + + } + + @Test + /** + * Test the method that adds data based on the object types + */ + public void testAddObectSpecificData() throws Exception { + HttpServletRequest request = mock(HttpServletRequest.class); + + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + Item item = ItemBuilder.createItem(context, collection).build(); + + File f = new File(testProps.get("test.bitstream").toString()); + Bitstream bitstream = BitstreamBuilder.createBitstream(context, item, new FileInputStream(f)).build(); + + 
context.restoreAuthSystemState(); + + BitstreamEventProcessor bitstreamEventProcessor = new BitstreamEventProcessor(context, request, bitstream); + + String result = bitstreamEventProcessor.addObjectSpecificData("existing-string", bitstream); + + assertThat(result, + is("existing-string&svc_dat=" + encodedUrl + "%2Fapi%2Fcore%2Fbitstreams%2F" + bitstream.getID() + + "%2Fcontent&rft_dat=Request")); + + } + +} diff --git a/dspace-api/src/test/java/org/dspace/statistics/export/processor/ExportEventProcessorTest.java b/dspace-api/src/test/java/org/dspace/statistics/export/processor/ExportEventProcessorTest.java new file mode 100644 index 0000000000..5df7405ed4 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/statistics/export/processor/ExportEventProcessorTest.java @@ -0,0 +1,281 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export.processor; + +import static org.hamcrest.CoreMatchers.startsWith; +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertThat; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; +import java.sql.SQLException; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.codec.CharEncoding; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EntityTypeBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.WorkspaceItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.EntityType; +import org.dspace.content.Item; +import org.dspace.content.WorkspaceItem; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; + +/** + * Test for the ExportEventProcessor class + */ +public class ExportEventProcessorTest extends AbstractIntegrationTestWithDatabase { + + @Mock + private HttpServletRequest request = mock(HttpServletRequest.class); + + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + + private EntityType publication; + private EntityType otherEntity; + private final String excluded_type = "Excluded type"; + + @Before + public void setUp() throws Exception { + super.setUp(); + + configurationService.setProperty("irus.statistics.tracker.urlversion", "Z39.88-2004"); + configurationService.setProperty("irus.statistics.tracker.enabled", true); + configurationService.setProperty("irus.statistics.tracker.type-field", "dc.type"); + configurationService.setProperty("irus.statistics.tracker.type-value", "Excluded type"); + + context.turnOffAuthorisationSystem(); + publication = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); + otherEntity = EntityTypeBuilder.createEntityTypeBuilder(context, "Other").build(); + context.restoreAuthSystemState(); + + + } + + @Test + /** + * Test the getBaseParameters method + */ + public void testGetBaseParameters() throws UnsupportedEncodingException { + + 
context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + Item item = ItemBuilder.createItem(context, collection).build(); + String encodedHandle = URLEncoder.encode(item.getHandle(), CharEncoding.UTF_8); + context.restoreAuthSystemState(); + + ExportEventProcessor exportEventProcessor = new ItemEventProcessor(context, request, item); + + when(request.getRemoteAddr()).thenReturn("test-client-ip"); + when(request.getHeader("USER-AGENT")).thenReturn("test-user-agent"); + when(request.getHeader("referer")).thenReturn("test-referer"); + + String result = exportEventProcessor.getBaseParameters(item); + String expected = "url_ver=Z39.88-2004&req_id=test-client-ip&req_dat=test-user-agent&rft.artnum=" + + "oai%3Alocalhost%3A" + encodedHandle + "&rfr_dat=test-referer&rfr_id=localhost&url_tim="; + + assertThat(result, startsWith(expected)); + + + } + + @Test + /** + * Test the ShouldProcessItem method where the item is null + */ + public void testShouldProcessItemWhenNull() throws SQLException { + ExportEventProcessor exportEventProcessor = new ItemEventProcessor(context, request, null); + + boolean result = exportEventProcessor.shouldProcessItem(null); + assertThat(result, is(false)); + } + + @Test + /** + * Test the ShouldProcessItem method where the item is not archived + */ + public void testShouldProcessItemWhenNotArchived() throws SQLException { + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + WorkspaceItem workspaceItem = WorkspaceItemBuilder.createWorkspaceItem(context, collection).build(); + context.restoreAuthSystemState(); + + ExportEventProcessor exportEventProcessor = new ItemEventProcessor(context, request, workspaceItem.getItem()); + + boolean result = exportEventProcessor.shouldProcessItem(workspaceItem.getItem()); + assertFalse(result); + } + + @Test + /** + * Test the ShouldProcessItem method where the item can be edit by the current user + */ + public void testShouldProcessItemWhenCanEdit() throws SQLException { + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + Item item = ItemBuilder.createItem(context, collection).withRelationshipType(otherEntity.getLabel()).build(); + context.restoreAuthSystemState(); + + context.setCurrentUser(admin); + ExportEventProcessor exportEventProcessor = new ItemEventProcessor(context, request, item); + + boolean result = exportEventProcessor.shouldProcessItem(item); + assertFalse(result); + + } + + @Test + /** + * Test the ShouldProcessItem method where the item type should be excluded + */ + public void testShouldProcessItemWhenShouldNotProcessType() throws Exception { + + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + Item item = ItemBuilder.createItem(context, collection) + .withType("Excluded type") + .withRelationshipType(publication.getLabel()) + .build(); + + context.restoreAuthSystemState(); + + ExportEventProcessor exportEventProcessor = new ItemEventProcessor(context, request, item); + + boolean result = 
exportEventProcessor.shouldProcessItem(item); + assertFalse(result); + + } + + @Test + /** + * Test the ShouldProcessItem method where the item entity type should not be processed + */ + public void testShouldProcessItemWhenShouldNotProcessEntity() throws SQLException { + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + Item item = ItemBuilder.createItem(context, collection).withRelationshipType(otherEntity.getLabel()).build(); + context.restoreAuthSystemState(); + + ExportEventProcessor exportEventProcessor = new ItemEventProcessor(context, request, item); + + boolean result = exportEventProcessor.shouldProcessItem(item); + assertFalse(result); + + } + + @Test + /** + * Test the ShouldProcessItem method where all conditions are met + */ + public void testShouldProcessItem() throws SQLException { + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + Item item = ItemBuilder.createItem(context, collection).withRelationshipType(publication.getLabel()).build(); + context.restoreAuthSystemState(); + + ExportEventProcessor exportEventProcessor = new ItemEventProcessor(context, request, item); + + boolean result = exportEventProcessor.shouldProcessItem(item); + assertTrue(result); + + } + + + @Test + /** + * Test the ShouldProcessEntityType method where all conditions are met + */ + public void testShouldProcessEntityType() throws SQLException { + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + Item item = ItemBuilder.createItem(context, collection).withRelationshipType(publication.getLabel()).build(); + context.restoreAuthSystemState(); + + ExportEventProcessor exportEventProcessor = new ItemEventProcessor(context, request, item); + + boolean result = exportEventProcessor.shouldProcessEntityType(item); + + assertTrue(result); + } + + @Test + /** + * Test the ShouldProcessEntityType method where the item entity type is not present in the configured list + */ + public void testShouldProcessEntityTypeWhenNotInList() throws SQLException { + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + Item item = ItemBuilder.createItem(context, collection).withRelationshipType(otherEntity.getLabel()).build(); + context.restoreAuthSystemState(); + + ExportEventProcessor exportEventProcessor = new ItemEventProcessor(context, request, item); + + boolean result = exportEventProcessor.shouldProcessEntityType(item); + + assertFalse(result); + + } + + + @Test + /** + * Test the shouldProcessItemType method where the item type is present in the list of excluded types + */ + public void testShouldProcessItemTypeInExcludeTrackerTypeList() { + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + Item item = ItemBuilder.createItem(context, collection).withType(excluded_type).build(); + context.restoreAuthSystemState(); + + ExportEventProcessor exportEventProcessor 
= new ItemEventProcessor(context, request, item); + + boolean result = exportEventProcessor.shouldProcessItemType(item); + assertFalse(result); + + } + + @Test + /** + * Test the shouldProcessItemType method where the item type is not present in the list of excluded types + */ + public void testShouldProcessItemTypeNotInExcludeTrackerTypeList() { + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + Item item = ItemBuilder.createItem(context, collection).withType("Not excluded type").build(); + context.restoreAuthSystemState(); + + ExportEventProcessor exportEventProcessor = new ItemEventProcessor(context, request, item); + + boolean result = exportEventProcessor.shouldProcessItemType(item); + assertTrue(result); + + } + +} diff --git a/dspace-api/src/test/java/org/dspace/statistics/export/processor/ItemEventProcessorTest.java b/dspace-api/src/test/java/org/dspace/statistics/export/processor/ItemEventProcessorTest.java new file mode 100644 index 0000000000..ded4546f26 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/statistics/export/processor/ItemEventProcessorTest.java @@ -0,0 +1,76 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export.processor; + +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.assertThat; + +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; + +import org.apache.commons.codec.CharEncoding; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.Before; +import org.junit.Test; + +/** + * Test class for the ItemEventProcessor + */ +public class ItemEventProcessorTest extends AbstractIntegrationTestWithDatabase { + + + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + + private String encodedUrl; + + @Before + public void setUp() throws Exception { + super.setUp(); + configurationService.setProperty("irus.statistics.tracker.enabled", true); + + String dspaceUrl = configurationService.getProperty("dspace.ui.url"); + try { + encodedUrl = URLEncoder.encode(dspaceUrl, CharEncoding.UTF_8); + } catch (UnsupportedEncodingException e) { + throw new AssertionError("Error occurred in setup()", e); + } + + } + + @Test + /** + * Test the method that adds data based on the object types + */ + public void testAddObectSpecificData() throws UnsupportedEncodingException { + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + Item item = ItemBuilder.createItem(context, collection).build(); + context.restoreAuthSystemState(); + + String encodedHandle = URLEncoder.encode(item.getHandle(), CharEncoding.UTF_8); + + ItemEventProcessor itemEventProcessor = new ItemEventProcessor(context, null, item); + String 
result = itemEventProcessor.addObjectSpecificData("existing-string", item); + + assertThat(result, + is("existing-string&svc_dat=" + encodedUrl + "%2Fhandle%2F" + encodedHandle + + "&rft_dat=Investigation")); + + } + + +} diff --git a/dspace-api/src/test/java/org/dspace/statistics/export/service/MockOpenUrlServiceImpl.java b/dspace-api/src/test/java/org/dspace/statistics/export/service/MockOpenUrlServiceImpl.java new file mode 100644 index 0000000000..14ac9d36d5 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/statistics/export/service/MockOpenUrlServiceImpl.java @@ -0,0 +1,41 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export.service; + +import java.io.IOException; +import java.net.HttpURLConnection; +import java.util.ArrayList; + +import org.apache.commons.lang.StringUtils; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Mock OpenUrlService that will ensure that IRUS tracker does need to be contacted in order to test the functionality + */ +public class MockOpenUrlServiceImpl extends OpenUrlServiceImpl { + + @Autowired + ArrayList testProcessedUrls; + + /** + * Returns a response code to simulate contact to the external url + * When the url contains "fail", a fail code 500 will be returned + * Otherwise the success code 200 will be returned + * @param urlStr + * @return 200 or 500 depending on whether the "fail" keyword is present in the url + * @throws IOException + */ + protected int getResponseCodeFromUrl(final String urlStr) throws IOException { + if (StringUtils.contains(urlStr, "fail")) { + return HttpURLConnection.HTTP_INTERNAL_ERROR; + } else { + testProcessedUrls.add(urlStr); + return HttpURLConnection.HTTP_OK; + } + } +} diff --git a/dspace-api/src/test/java/org/dspace/statistics/export/service/OpenUrlServiceImplTest.java b/dspace-api/src/test/java/org/dspace/statistics/export/service/OpenUrlServiceImplTest.java new file mode 100644 index 0000000000..192b771458 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/statistics/export/service/OpenUrlServiceImplTest.java @@ -0,0 +1,134 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics.export.service; + +import static org.hamcrest.CoreMatchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.doCallRealMethod; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.net.HttpURLConnection; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; + +import org.dspace.core.Context; +import org.dspace.statistics.export.OpenURLTracker; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Spy; +import org.mockito.junit.MockitoJUnitRunner; + +/** + * Test class for the OpenUrlServiceImpl + */ 
+@RunWith(MockitoJUnitRunner.class) +public class OpenUrlServiceImplTest { + + @InjectMocks + @Spy + private OpenUrlServiceImpl openUrlService; + + @Mock + private FailedOpenURLTrackerService failedOpenURLTrackerService; + + /** + * Test the processUrl method + * @throws IOException + * @throws SQLException + */ + @Test + public void testProcessUrl() throws IOException, SQLException { + Context context = mock(Context.class); + + doReturn(HttpURLConnection.HTTP_OK).when(openUrlService) + .getResponseCodeFromUrl(anyString()); + openUrlService.processUrl(context, "test-url"); + + verify(openUrlService, times(0)).logfailed(context, "test-url"); + + + } + + /** + * Test the processUrl method when the url connection fails + * @throws IOException + * @throws SQLException + */ + @Test + public void testProcessUrlOnFail() throws IOException, SQLException { + Context context = mock(Context.class); + + doReturn(HttpURLConnection.HTTP_INTERNAL_ERROR).when(openUrlService) + .getResponseCodeFromUrl(anyString()); + doNothing().when(openUrlService).logfailed(any(Context.class), anyString()); + + openUrlService.processUrl(context, "test-url"); + + verify(openUrlService, times(1)).logfailed(context, "test-url"); + + + } + + /** + * Test the ReprocessFailedQueue method + * @throws SQLException + */ + @Test + public void testReprocessFailedQueue() throws SQLException { + Context context = mock(Context.class); + + List trackers = new ArrayList<>(); + OpenURLTracker tracker1 = mock(OpenURLTracker.class); + OpenURLTracker tracker2 = mock(OpenURLTracker.class); + OpenURLTracker tracker3 = mock(OpenURLTracker.class); + + trackers.add(tracker1); + trackers.add(tracker2); + trackers.add(tracker3); + + when(failedOpenURLTrackerService.findAll(any(Context.class))).thenReturn(trackers); + doNothing().when(openUrlService).tryReprocessFailed(any(Context.class), any(OpenURLTracker.class)); + + openUrlService.reprocessFailedQueue(context); + + verify(openUrlService, times(3)).tryReprocessFailed(any(Context.class), any(OpenURLTracker.class)); + + } + + /** + * Test the method that logs the failed urls in the db + * @throws SQLException + */ + @Test + public void testLogfailed() throws SQLException { + Context context = mock(Context.class); + OpenURLTracker tracker1 = mock(OpenURLTracker.class); + + doCallRealMethod().when(tracker1).setUrl(anyString()); + when(tracker1.getUrl()).thenCallRealMethod(); + + when(failedOpenURLTrackerService.create(any(Context.class))).thenReturn(tracker1); + + String failedUrl = "failed-url"; + openUrlService.logfailed(context, failedUrl); + + Assert.assertThat(tracker1.getUrl(), is(failedUrl)); + + } +} diff --git a/dspace-api/src/test/java/org/dspace/xmlworkflow/RoleTest.java b/dspace-api/src/test/java/org/dspace/xmlworkflow/RoleTest.java index 262804f12b..f000f8f4d9 100644 --- a/dspace-api/src/test/java/org/dspace/xmlworkflow/RoleTest.java +++ b/dspace-api/src/test/java/org/dspace/xmlworkflow/RoleTest.java @@ -30,7 +30,7 @@ public class RoleTest extends AbstractUnitTest { @Test public void defaultWorkflow_RoleReviewer() { - Role role = defaultWorkflow.getRoles().get("Reviewer"); + Role role = defaultWorkflow.getRoles().get("reviewer"); assertEquals("The people responsible for this step are able to edit the metadata of incoming submissions, " + "and then accept or reject them.", role.getDescription()); assertEquals("Reviewer", role.getName()); @@ -39,7 +39,7 @@ public class RoleTest extends AbstractUnitTest { @Test public void defaultWorkflow_RoleEditor() { - Role role = 
defaultWorkflow.getRoles().get("Editor"); + Role role = defaultWorkflow.getRoles().get("editor"); assertEquals("The people responsible for this step are able to edit the " + "metadata of incoming submissions, and then accept or reject them.", role.getDescription()); assertEquals("Editor", role.getName()); @@ -48,7 +48,7 @@ public class RoleTest extends AbstractUnitTest { @Test public void defaultWorkflow_RoleFinalEditor() { - Role role = defaultWorkflow.getRoles().get("Final Editor"); + Role role = defaultWorkflow.getRoles().get("finaleditor"); assertEquals("The people responsible for this step are able to edit the " + "metadata of incoming submissions, but will not be able to reject them.", role.getDescription()); assertEquals("Final Editor", role.getName()); @@ -57,21 +57,21 @@ public class RoleTest extends AbstractUnitTest { @Test public void selectSingleReviewer_RoleReviewManagers() { - Role role = selectSingleReviewer.getRoles().get("ReviewManagers"); + Role role = selectSingleReviewer.getRoles().get("reviewmanagers"); assertEquals("ReviewManagers", role.getName()); assertEquals(Role.Scope.REPOSITORY, role.getScope()); } @Test public void selectSingleReviewer_RoleReviewer() { - Role role = selectSingleReviewer.getRoles().get("Reviewer"); + Role role = selectSingleReviewer.getRoles().get("scoreassignedreviewer"); assertEquals("Reviewer", role.getName()); assertEquals(Role.Scope.ITEM, role.getScope()); } @Test public void scoreReview_RoleScoreReviewers() { - Role role = scoreReview.getRoles().get("ScoreReviewers"); + Role role = scoreReview.getRoles().get("scorereviewers"); assertEquals("ScoreReviewers", role.getName()); assertEquals(Role.Scope.COLLECTION, role.getScope()); } diff --git a/dspace-api/src/test/java/org/dspace/xmlworkflow/XmlWorkflowFactoryTest.java b/dspace-api/src/test/java/org/dspace/xmlworkflow/XmlWorkflowFactoryTest.java index a19e6a2622..03a6a0e949 100644 --- a/dspace-api/src/test/java/org/dspace/xmlworkflow/XmlWorkflowFactoryTest.java +++ b/dspace-api/src/test/java/org/dspace/xmlworkflow/XmlWorkflowFactoryTest.java @@ -10,8 +10,10 @@ package org.dspace.xmlworkflow; import static junit.framework.TestCase.assertEquals; import static org.junit.Assert.fail; +import java.io.IOException; import java.sql.SQLException; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.AbstractUnitTest; import org.dspace.authorize.AuthorizeException; @@ -35,9 +37,11 @@ import org.junit.Test; */ public class XmlWorkflowFactoryTest extends AbstractUnitTest { - private CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); - private CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); - private XmlWorkflowFactory xmlWorkflowFactory + private final CollectionService collectionService + = ContentServiceFactory.getInstance().getCollectionService(); + private final CommunityService communityService + = ContentServiceFactory.getInstance().getCommunityService(); + private final XmlWorkflowFactory xmlWorkflowFactory = new DSpace().getServiceManager().getServiceByName("xmlWorkflowFactory", XmlWorkflowFactoryImpl.class); private Community owningCommunity; @@ -47,7 +51,7 @@ public class XmlWorkflowFactoryTest extends AbstractUnitTest { /** * log4j category */ - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(XmlWorkflowFactoryTest.class); + private static final Logger log = LogManager.getLogger(XmlWorkflowFactoryTest.class); /** * This 
method will be run before every test as per @Before. It will @@ -94,7 +98,7 @@ public class XmlWorkflowFactoryTest extends AbstractUnitTest { this.collectionService.delete(context, this.nonMappedCollection); this.collectionService.delete(context, this.mappedCollection); this.communityService.delete(context, this.owningCommunity); - } catch (Exception e) { + } catch (IOException | SQLException | AuthorizeException e) { log.error("Error in destroy", e); } @@ -112,12 +116,12 @@ public class XmlWorkflowFactoryTest extends AbstractUnitTest { @Test public void workflowMapping_NonMappedCollection() throws WorkflowConfigurationException { Workflow workflow = xmlWorkflowFactory.getWorkflow(this.nonMappedCollection); - assertEquals(workflow.getID(), "defaultWorkflow"); + assertEquals(XmlWorkflowFactoryImpl.LEGACY_WORKFLOW_NAME, workflow.getID()); } @Test public void workflowMapping_MappedCollection() throws WorkflowConfigurationException { Workflow workflow = xmlWorkflowFactory.getWorkflow(this.mappedCollection); - assertEquals(workflow.getID(), "selectSingleReviewer"); + assertEquals( "selectSingleReviewer", workflow.getID()); } } diff --git a/dspace-api/src/test/resources/test-config.properties b/dspace-api/src/test/resources/test-config.properties index 49aaa9bb10..66a29ab9a0 100644 --- a/dspace-api/src/test/resources/test-config.properties +++ b/dspace-api/src/test/resources/test-config.properties @@ -11,3 +11,5 @@ test.folder = ./target/testing/ # Path of the test bitstream (to use in BitstreamTest and elsewhere) test.bitstream = ./target/testing/dspace/assetstore/ConstitutionofIreland.pdf +test.exportcsv = ./target/testing/dspace/assetstore/test.csv +test.importcsv = ./target/testing/dspace/assetstore/testImport.csv diff --git a/dspace-oai/pom.xml b/dspace-oai/pom.xml index 4caec1c3b2..05ee3dc7df 100644 --- a/dspace-oai/pom.xml +++ b/dspace-oai/pom.xml @@ -8,7 +8,7 @@ dspace-parent org.dspace - 7.0-SNAPSHOT + 7.0-beta5-SNAPSHOT .. @@ -142,19 +142,22 @@ org.springframework.boot spring-boot-starter-web - + - org.ow2.asm - asm - - - - org.springframework.boot - spring-boot-configuration-processor + org.parboiled + parboiled-java + + + org.parboiled + parboiled-java + 1.3.1 + + org.dspace @@ -190,10 +193,10 @@ org.apache.logging.log4j log4j-core - + org.apache.logging.log4j log4j-web - + org.apache.logging.log4j log4j-slf4j-impl @@ -258,12 +261,6 @@ ${spring.version} test - - org.parboiled - parboiled-core - 1.1.7 - test - org.xmlmatchers xml-matchers diff --git a/dspace-rdf/pom.xml b/dspace-rdf/pom.xml index 03959f9cdb..c483ea5a91 100644 --- a/dspace-rdf/pom.xml +++ b/dspace-rdf/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - 7.0-SNAPSHOT + 7.0-beta5-SNAPSHOT .. diff --git a/dspace-rdf/src/main/java/org/dspace/rdf/providing/LocalURIRedirectionServlet.java b/dspace-rdf/src/main/java/org/dspace/rdf/providing/LocalURIRedirectionServlet.java index b6a6854938..7224bb9bfb 100644 --- a/dspace-rdf/src/main/java/org/dspace/rdf/providing/LocalURIRedirectionServlet.java +++ b/dspace-rdf/src/main/java/org/dspace/rdf/providing/LocalURIRedirectionServlet.java @@ -86,7 +86,8 @@ public class LocalURIRedirectionServlet extends HttpServlet { response.sendError(HttpServletResponse.SC_NOT_FOUND); return; } - + // use object's reported handle for redirect (just in case user provided handle had odd characters) + handle = dso.getHandle(); // close the context and send forward. 
context.abort(); Negotiator.sendRedirect(response, handle, "", requestedMimeType, true); diff --git a/dspace-rest/pom.xml b/dspace-rest/pom.xml index df22263136..be80c8c159 100644 --- a/dspace-rest/pom.xml +++ b/dspace-rest/pom.xml @@ -3,7 +3,7 @@ org.dspace dspace-rest war - 7.0-SNAPSHOT + 7.0-beta5-SNAPSHOT DSpace (Deprecated) REST Webapp DSpace RESTful Web Services API. NOTE: this REST API is DEPRECATED. Please consider using the REST API in the dspace-server-webapp instead! @@ -12,14 +12,14 @@ org.dspace dspace-parent - 7.0-SNAPSHOT + 7.0-beta5-SNAPSHOT .. ${basedir}/.. - 5.1.3.RELEASE + 5.3.3.RELEASE @@ -104,7 +104,7 @@ ${jackson.version} - + org.springframework spring-core @@ -123,9 +123,10 @@ org.glassfish.jersey.ext - jersey-spring4 + jersey-spring5 ${jersey.version} + org.springframework spring @@ -150,6 +151,11 @@ org.springframework spring-aop + + + jakarta.annotation + jakarta.annotation-api + diff --git a/dspace-rest/src/main/java/org/dspace/rest/CollectionsResource.java b/dspace-rest/src/main/java/org/dspace/rest/CollectionsResource.java index af06792b7b..66919ad5c7 100644 --- a/dspace-rest/src/main/java/org/dspace/rest/CollectionsResource.java +++ b/dspace-rest/src/main/java/org/dspace/rest/CollectionsResource.java @@ -274,16 +274,16 @@ public class CollectionsResource extends Resource { headers, request, context); items = new ArrayList(); - Iterator dspaceItems = itemService.findByCollection(context, dspaceCollection); - for (int i = 0; (dspaceItems.hasNext()) && (i < (limit + offset)); i++) { + Iterator dspaceItems = itemService.findByCollection(context, dspaceCollection, + limit, offset); + + while (dspaceItems.hasNext()) { org.dspace.content.Item dspaceItem = dspaceItems.next(); - if (i >= offset) { - if (itemService.isItemListedForUser(context, dspaceItem)) { - items.add(new Item(dspaceItem, servletContext, expand, context)); - writeStats(dspaceItem, UsageEvent.Action.VIEW, user_ip, user_agent, xforwardedfor, - headers, request, context); - } + if (itemService.isItemListedForUser(context, dspaceItem)) { + items.add(new Item(dspaceItem, servletContext, expand, context)); + writeStats(dspaceItem, UsageEvent.Action.VIEW, user_ip, user_agent, xforwardedfor, + headers, request, context); } } diff --git a/dspace-rest/src/main/webapp/WEB-INF/applicationContext.xml b/dspace-rest/src/main/webapp/WEB-INF/applicationContext.xml index 62b660b86b..ec892fbaa4 100644 --- a/dspace-rest/src/main/webapp/WEB-INF/applicationContext.xml +++ b/dspace-rest/src/main/webapp/WEB-INF/applicationContext.xml @@ -28,7 +28,7 @@ org.dspace.app.rest.Application - - 7.9 - - - - test-environment - - false - - maven.test.skip - false - - - - - - - maven-dependency-plugin - - ${project.build.directory}/testing - - - org.dspace - dspace-parent - ${project.version} - zip - testEnvironment - - - - - - setupTestEnvironment - generate-test-resources - - unpack - - - - setupIntegrationTestEnvironment - pre-integration-test - - unpack - - - - - - - - org.codehaus.gmaven - groovy-maven-plugin - - - setproperty - generate-test-resources - - - execute - - - - project.properties['agnostic.build.dir'] = project.build.directory.replace(File.separator, '/'); - println("Initializing Maven property 'agnostic.build.dir' to: " + project.properties['agnostic.build.dir']); - - - - - - - - - maven-surefire-plugin - - - - - ${agnostic.build.dir}/testing/dspace/ - - true - ${agnostic.build.dir}/testing/dspace/solr/ - - - - - - - maven-failsafe-plugin - - - - ${agnostic.build.dir}/testing/dspace/ - - true - 
${agnostic.build.dir}/testing/dspace/solr/ - - - - - - - - - @@ -174,9 +57,175 @@ + + + com.mycila + license-maven-plugin + + + **/src/test/resources/** + **/src/test/data/** + + src/main/webapp/index.html + src/main/webapp/login.html + src/main/webapp/js/hal/** + + + + + + org.codehaus.gmaven + groovy-maven-plugin + + + setproperty + initialize + + execute + + + + project.properties['agnostic.build.dir'] = project.build.directory.replace(File.separator, '/'); + log.info("Initializing Maven property 'agnostic.build.dir' to: {}", project.properties['agnostic.build.dir']); + + + + + + + + + unit-test-environment + + false + + skipUnitTests + false + + + + + + + maven-dependency-plugin + + ${project.build.directory}/testing + + + org.dspace + dspace-parent + ${project.version} + zip + testEnvironment + + + + + + setupUnitTestEnvironment + generate-test-resources + + unpack + + + + + + + + maven-surefire-plugin + + + + + ${agnostic.build.dir}/testing/dspace/ + + true + ${agnostic.build.dir}/testing/dspace/solr/ + + + + + + + + + + + integration-test-environment + + false + + skipIntegrationTests + false + + + + + + + maven-dependency-plugin + + ${project.build.directory}/testing + + + org.dspace + dspace-parent + ${project.version} + zip + testEnvironment + + + + + + setupIntegrationTestEnvironment + pre-integration-test + + unpack + + + + + + + + maven-failsafe-plugin + + + + ${agnostic.build.dir}/testing/dspace/ + + true + ${agnostic.build.dir}/testing/dspace/solr/ + + + + + + + + + + @@ -241,13 +290,26 @@ org.springframework.data spring-data-rest-hal-browser - 3.1.10.RELEASE - + ${spring-hal-browser.version} + + + + org.webjars.bowergithub.jquery + jquery-dist + 3.5.1 + + + + org.webjars.bowergithub.codeseven + toastr + 2.1.4 + + + org.springframework.boot @@ -255,10 +317,8 @@ ${spring-boot.version} - - + + org.springframework.boot spring-boot-starter @@ -283,6 +343,13 @@ dspace-api + + org.dspace + dspace-api + test-jar + test + + org.dspace dspace-services @@ -346,6 +413,7 @@ com.jayway.jsonpath json-path-assert + ${json-path.version} test @@ -435,6 +503,14 @@ solr-cell test + + org.bouncycastle + bcpkix-jdk15on + + + org.bouncycastle + bcprov-jdk15on + org.eclipse.jetty jetty-continuation @@ -499,13 +575,11 @@ org.apache.lucene lucene-analyzers-smartcn - ${solr.client.version} test org.apache.lucene lucene-analyzers-stempel - ${solr.client.version} test diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/Application.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/Application.java index 900324fc72..a2ea0d1c3c 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/Application.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/Application.java @@ -7,15 +7,18 @@ */ package org.dspace.app.rest; +import java.io.IOException; +import java.sql.SQLException; import java.util.List; import javax.servlet.Filter; import org.dspace.app.rest.filter.DSpaceRequestContextFilter; -import org.dspace.app.rest.model.hateoas.DSpaceRelProvider; +import org.dspace.app.rest.model.hateoas.DSpaceLinkRelationProvider; import org.dspace.app.rest.parameter.resolver.SearchFilterResolver; import org.dspace.app.rest.utils.ApplicationConfig; import org.dspace.app.rest.utils.DSpaceConfigurationInitializer; import org.dspace.app.rest.utils.DSpaceKernelInitializer; +import org.dspace.app.sitemap.GenerateSitemaps; import org.dspace.app.util.DSpaceContextListener; import org.dspace.utils.servlet.DSpaceWebappServletFilter; import org.slf4j.Logger; @@ -26,12 +29,15 @@ 
import org.springframework.boot.builder.SpringApplicationBuilder; import org.springframework.boot.web.servlet.support.SpringBootServletInitializer; import org.springframework.context.annotation.Bean; import org.springframework.core.annotation.Order; -import org.springframework.hateoas.RelProvider; +import org.springframework.hateoas.server.LinkRelationProvider; import org.springframework.lang.NonNull; +import org.springframework.scheduling.annotation.EnableScheduling; +import org.springframework.scheduling.annotation.Scheduled; import org.springframework.web.context.request.RequestContextListener; import org.springframework.web.cors.CorsConfiguration; import org.springframework.web.method.support.HandlerMethodArgumentResolver; import org.springframework.web.servlet.config.annotation.CorsRegistry; +import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry; import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; /** @@ -48,6 +54,7 @@ import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; * @author Tim Donohue */ @SpringBootApplication +@EnableScheduling public class Application extends SpringBootServletInitializer { private static final Logger log = LoggerFactory.getLogger(Application.class); @@ -55,6 +62,11 @@ public class Application extends SpringBootServletInitializer { @Autowired private ApplicationConfig configuration; + @Scheduled(cron = "${sitemap.cron:-}") + public void generateSitemap() throws IOException, SQLException { + GenerateSitemaps.generateSitemapsScheduled(); + } + /** * Override the default SpringBootServletInitializer.configure() method, * passing it this Application class. @@ -118,26 +130,49 @@ public class Application extends SpringBootServletInitializer { } @Bean - protected RelProvider dspaceRelProvider() { - return new DSpaceRelProvider(); + protected LinkRelationProvider dspaceLinkRelationProvider() { + return new DSpaceLinkRelationProvider(); } @Bean public WebMvcConfigurer webMvcConfigurer() { return new WebMvcConfigurer() { + /** + * Create a custom CORS mapping for the DSpace REST API (/api/ paths), based on configured allowed origins. 
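+             * The mapping below also sets Access-Control-Allow-Credentials and lists the allowed request
+             * headers and exposed response headers (including "Authorization" and "X-On-Behalf-Of").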
+ * @param registry CorsRegistry + */ @Override public void addCorsMappings(@NonNull CorsRegistry registry) { String[] corsAllowedOrigins = configuration.getCorsAllowedOrigins(); + boolean corsAllowCredentials = configuration.getCorsAllowCredentials(); if (corsAllowedOrigins != null) { registry.addMapping("/api/**").allowedMethods(CorsConfiguration.ALL) - .allowedOrigins(corsAllowedOrigins).allowedHeaders("Authorization", "Content-Type", - "X-Requested-With", "accept", "Origin", "Access-Control-Request-Method", - "Access-Control-Request-Headers") - .exposedHeaders("Access-Control-Allow-Origin", "Authorization"); + // Set Access-Control-Allow-Credentials to "true" and specify which origins are valid + // for our Access-Control-Allow-Origin header + .allowCredentials(corsAllowCredentials).allowedOrigins(corsAllowedOrigins) + // Allow list of request preflight headers allowed to be sent to us from the client + .allowedHeaders("Authorization", "Content-Type", "X-Requested-With", "accept", "Origin", + "Access-Control-Request-Method", "Access-Control-Request-Headers", + "X-On-Behalf-Of") + // Allow list of response headers allowed to be sent by us (the server) + .exposedHeaders("Access-Control-Allow-Origin", "Access-Control-Allow-Credentials", + "Authorization"); } } + /** + * Add a new ResourceHandler to allow us to use WebJars.org to pull in web dependencies + * dynamically for HAL Browser, and access them off the /webjars path. + * @param registry ResourceHandlerRegistry + */ + @Override + public void addResourceHandlers(ResourceHandlerRegistry registry) { + registry + .addResourceHandler("/webjars/**") + .addResourceLocations("/webjars/"); + } + @Override public void addArgumentResolvers(@NonNull List argumentResolvers) { argumentResolvers.add(new SearchFilterResolver()); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/AuthenticationRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/AuthenticationRestController.java index 68f9085e21..3038011009 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/AuthenticationRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/AuthenticationRestController.java @@ -16,10 +16,13 @@ import org.dspace.app.rest.converter.ConverterService; import org.dspace.app.rest.converter.EPersonConverter; import org.dspace.app.rest.link.HalLinkService; import org.dspace.app.rest.model.AuthenticationStatusRest; +import org.dspace.app.rest.model.AuthenticationTokenRest; import org.dspace.app.rest.model.AuthnRest; import org.dspace.app.rest.model.EPersonRest; import org.dspace.app.rest.model.hateoas.AuthenticationStatusResource; +import org.dspace.app.rest.model.hateoas.AuthenticationTokenResource; import org.dspace.app.rest.model.hateoas.AuthnResource; +import org.dspace.app.rest.model.wrapper.AuthenticationToken; import org.dspace.app.rest.projection.Projection; import org.dspace.app.rest.security.RestAuthenticationService; import org.dspace.app.rest.utils.ContextUtil; @@ -32,6 +35,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.hateoas.Link; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; +import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; @@ -118,6 +122,30 @@ public class AuthenticationRestController 
implements InitializingBean { "valid."); } + /** + * This method will generate a short lived token to be used for bitstream downloads among other things. + * + * curl -v -X POST https://{dspace-server.url}/api/authn/shortlivedtokens -H "Authorization: Bearer eyJhbG...COdbo" + * + * Example: + *

     +     * <pre>
     +     * {@code
     +     * curl -v -X POST https://{dspace-server.url}/api/authn/shortlivedtokens -H "Authorization: Bearer eyJhbG...COdbo"
     +     * }
     +     * </pre>
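     +     * Note: this endpoint requires an already authenticated user, as enforced by the
     +     * {@code @PreAuthorize("hasAuthority('AUTHENTICATED')")} annotation below.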
    + * @param request The StandardMultipartHttpServletRequest + * @return The created short lived token + */ + @PreAuthorize("hasAuthority('AUTHENTICATED')") + @RequestMapping(value = "/shortlivedtokens", method = RequestMethod.POST) + public AuthenticationTokenResource shortLivedToken(HttpServletRequest request) { + Projection projection = utils.obtainProjection(); + AuthenticationToken shortLivedToken = + restAuthenticationService.getShortLivedAuthenticationToken(ContextUtil.obtainContext(request), request); + AuthenticationTokenRest authenticationTokenRest = converter.toRest(shortLivedToken, projection); + return converter.toResource(authenticationTokenRest); + } + @RequestMapping(value = "/login", method = { RequestMethod.GET, RequestMethod.PUT, RequestMethod.PATCH, RequestMethod.DELETE }) public ResponseEntity login() { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BundleUploadBitstreamController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BundleUploadBitstreamController.java index 7035e329f2..a0f5d5f71e 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BundleUploadBitstreamController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BundleUploadBitstreamController.java @@ -31,7 +31,7 @@ import org.dspace.core.Context; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.rest.webmvc.ControllerUtils; import org.springframework.data.rest.webmvc.ResourceNotFoundException; -import org.springframework.hateoas.ResourceSupport; +import org.springframework.hateoas.RepresentationModel; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; @@ -86,9 +86,11 @@ public class BundleUploadBitstreamController { */ @RequestMapping(method = RequestMethod.POST, headers = "content-type=multipart/form-data") @PreAuthorize("hasPermission(#uuid, 'BUNDLE', 'ADD') && hasPermission(#uuid, 'BUNDLE', 'WRITE')") - public ResponseEntity uploadBitstream(HttpServletRequest request, @PathVariable UUID uuid, - @RequestParam("file") MultipartFile uploadfile, - @RequestParam(value = "properties", required = false) String properties) { + public ResponseEntity> uploadBitstream( + HttpServletRequest request, + @PathVariable UUID uuid, + @RequestParam("file") MultipartFile uploadfile, + @RequestParam(value = "properties", required = false) String properties) { Context context = ContextUtil.obtainContext(request); Bundle bundle = null; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/CollectionGroupRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/CollectionGroupRestController.java new file mode 100644 index 0000000000..d85685a188 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/CollectionGroupRestController.java @@ -0,0 +1,470 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.List; +import java.util.UUID; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.commons.lang3.StringUtils; +import 
org.dspace.app.rest.converter.ConverterService; +import org.dspace.app.rest.exception.UnprocessableEntityException; +import org.dspace.app.rest.model.GroupRest; +import org.dspace.app.rest.model.hateoas.GroupResource; +import org.dspace.app.rest.repository.CollectionRestRepository; +import org.dspace.app.rest.utils.ContextUtil; +import org.dspace.app.util.AuthorizeUtil; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Collection; +import org.dspace.content.service.CollectionService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.eperson.Group; +import org.dspace.workflow.WorkflowService; +import org.dspace.xmlworkflow.WorkflowUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.rest.webmvc.ControllerUtils; +import org.springframework.data.rest.webmvc.ResourceNotFoundException; +import org.springframework.hateoas.RepresentationModel; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +/** + * This RestController will take care of all the calls for a specific collection's special group + * This is handled by calling "/api/core/collections/{uuid}/{group}" with the correct RequestMethod + * This works for specific WorkflowGroups as well given that their role is supplied by calling + * "/api/core/collections/{uuid}/workflowGroups/{workflowRole}" + */ +@RestController +@RequestMapping("/api/core/collections" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID) +public class CollectionGroupRestController { + + @Autowired + private CollectionService collectionService; + + @Autowired + private CollectionRestRepository collectionRestRepository; + + @Autowired + private ConverterService converterService; + + @Autowired + private AuthorizeService authorizeService; + + @Autowired + private WorkflowService workflowService; + + /** + * This method creates and returns an AdminGroup object for the given collection + * This is called by using RequestMethod.POST on the /adminGroup value + * @param uuid The UUID of the collection for which we'll create an adminGroup + * @param response The current response + * @param request The current request + * @return The created AdminGroup + * @throws SQLException If something goes wrong + * @throws AuthorizeException If something goes wrong + */ + @RequestMapping(method = RequestMethod.POST, value = "/adminGroup") + @PreAuthorize("hasPermission(#uuid, 'COLLECTION', 'WRITE')") + public ResponseEntity> postAdminGroup(@PathVariable UUID uuid, HttpServletResponse response, + HttpServletRequest request) + throws SQLException, AuthorizeException { + + Context context = ContextUtil.obtainContext(request); + Collection collection = collectionService.find(context, uuid); + + if (collection == null) { + throw new ResourceNotFoundException("No such collection: " + uuid); + } + AuthorizeUtil.authorizeManageAdminGroup(context, collection); + if (collection.getAdministrators() != null) { + throw new UnprocessableEntityException("The collection with UUID: " + uuid + " already has " + + "an admin group"); + } + GroupRest 
adminGroup = collectionRestRepository.createAdminGroup(context, request, collection); + context.complete(); + GroupResource groupResource = converterService.toResource(adminGroup); + return ControllerUtils.toResponseEntity(HttpStatus.CREATED, new HttpHeaders(), groupResource); + } + + /** + * This method takes care of the deletion of an AdminGroup for the given collection + * This is called by using RequestMethod.DELETE on the /adminGroup value + * @param uuid The UUID of the collection for which we'll delete the AdminGroup + * @param response The current response + * @param request The current request + * @return An empty response if the deletion was successful + * @throws SQLException If something goes wrong + * @throws AuthorizeException If something goes wrong + * @throws IOException If something goes wrong + */ + @RequestMapping(method = RequestMethod.DELETE, value = "/adminGroup") + @PreAuthorize("hasPermission(#uuid, 'COLLECTION', 'WRITE')") + public ResponseEntity> deleteAdminGroup(@PathVariable UUID uuid, + HttpServletResponse response, + HttpServletRequest request) + throws SQLException, AuthorizeException, IOException { + + Context context = ContextUtil.obtainContext(request); + Collection collection = collectionService.find(context, uuid); + if (collection == null) { + throw new ResourceNotFoundException("No such collection: " + uuid); + } + + AuthorizeUtil.authorizeManageAdminGroup(context, collection); + if (collection.getAdministrators() == null) { + throw new UnprocessableEntityException("The collection with UUID: " + uuid + " doesn't have an admin " + + "group"); + } + collectionRestRepository.deleteAdminGroup(context, collection); + context.complete(); + return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); + } + + /** + * This method creates and returns a SubmitterGroup object for the given collection + * This is called by using RequestMethod.POST on the /submittersGroup + * @param uuid The UUID of the collection for which we'll create a submitterGroup + * @param response The current response + * @param request The current request + * @return The created SubmitterGroup + * @throws SQLException If something goes wrong + * @throws AuthorizeException If something goes wrong + */ + @RequestMapping(method = RequestMethod.POST, value = "/submittersGroup") + @PreAuthorize("hasPermission(#uuid, 'COLLECTION', 'WRITE')") + public ResponseEntity> postSubmittersGroup(@PathVariable UUID uuid, + HttpServletResponse response, + HttpServletRequest request) + throws SQLException, AuthorizeException { + + Context context = ContextUtil.obtainContext(request); + Collection collection = collectionService.find(context, uuid); + + if (collection == null) { + throw new ResourceNotFoundException("No such collection: " + uuid); + } + AuthorizeUtil.authorizeManageSubmittersGroup(context, collection); + if (collection.getSubmitters() != null) { + throw new UnprocessableEntityException("The collection with UUID: " + uuid + " already has " + + "a submitter group"); + } + GroupRest submitterGroup = collectionRestRepository.createSubmitterGroup(context, request, collection); + context.complete(); + GroupResource groupResource = converterService.toResource(submitterGroup); + return ControllerUtils.toResponseEntity(HttpStatus.CREATED, new HttpHeaders(), groupResource); + } + + /** + * This method takes care of the deletion of a SubmitterGroup for the given collection + * This is called by using RequestMethod.DELETE on the default url for this class + * @param uuid The UUID of the collection for 
which we'll delete the SubmittersGroup + * @param response The current response + * @param request The current request + * @return An empty response if the deletion was successful + * @throws SQLException If something goes wrong + * @throws AuthorizeException If something goes wrong + * @throws IOException If something goes wrong + */ + @RequestMapping(method = RequestMethod.DELETE, value = "/submittersGroup") + @PreAuthorize("hasPermission(#uuid, 'COLLECTION', 'WRITE')") + public ResponseEntity> deleteSubmittersGroup(@PathVariable UUID uuid, + HttpServletResponse response, + HttpServletRequest request) + throws SQLException, AuthorizeException, IOException { + + Context context = ContextUtil.obtainContext(request); + Collection collection = collectionService.find(context, uuid); + if (collection == null) { + throw new ResourceNotFoundException("No such collection: " + uuid); + } + AuthorizeUtil.authorizeManageSubmittersGroup(context, collection); + if (collection.getSubmitters() == null) { + throw new UnprocessableEntityException("The collection with UUID: " + uuid + " doesn't have a submitter " + + "group"); + } + collectionRestRepository.deleteSubmitterGroup(context, collection); + context.complete(); + return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); + } + + /** + * This method creates and returns a ItemReadGroup object for the given collection + * This is called by using RequestMethod.POST on the /itemReadGroup value + * @param uuid The UUID of the collection for which we'll create a ItemReadGroup + * @param response The current response + * @param request The current request + * @return The created ItemReadGroup + * @throws SQLException If something goes wrong + * @throws AuthorizeException If something goes wrong + */ + @RequestMapping(method = RequestMethod.POST, value = "/itemReadGroup") + @PreAuthorize("hasPermission(#uuid, 'COLLECTION', 'WRITE')") + public ResponseEntity> postItemReadGroup(@PathVariable UUID uuid, + HttpServletResponse response, + HttpServletRequest request) + throws SQLException, AuthorizeException { + + Context context = ContextUtil.obtainContext(request); + Collection collection = collectionService.find(context, uuid); + + if (collection == null) { + throw new ResourceNotFoundException("No such collection: " + uuid); + } + AuthorizeUtil.authorizeManageDefaultReadGroup(context, collection); + List itemGroups = authorizeService + .getAuthorizedGroups(context, collection, Constants.DEFAULT_ITEM_READ); + if (itemGroups != null && !itemGroups.isEmpty()) { + Group itemReadGroup = itemGroups.get(0); + if (itemReadGroup != null && !StringUtils.equalsIgnoreCase(itemReadGroup.getName(), Group.ANONYMOUS)) { + throw new UnprocessableEntityException( + "Unable to create a new default read group because either the group already exists or multiple " + + "groups are assigned the default privileges."); + } + } + + GroupRest itemReadGroup = collectionRestRepository.createItemReadGroup(context, request, collection); + context.complete(); + GroupResource groupResource = converterService.toResource(itemReadGroup); + return ControllerUtils.toResponseEntity(HttpStatus.CREATED, new HttpHeaders(), groupResource); + } + + /** + * This method takes care of the deletion of an ItemReadGroup for the given collection + * This is called by using RequestMethod.DELETE on the /itemReadGroup value + * @param uuid The UUID of the collection for which we'll delete the ItemReadGroup + * @param response The current response + * @param request The current request + * @return An empty 
response if the deletion was successful + * @throws SQLException If something goes wrong + * @throws AuthorizeException If something goes wrong + * @throws IOException If something goes wrong + */ + @RequestMapping(method = RequestMethod.DELETE, value = "/itemReadGroup") + @PreAuthorize("hasPermission(#uuid, 'COLLECTION', 'WRITE')") + public ResponseEntity> deleteItemReadGroup(@PathVariable UUID uuid, + HttpServletResponse response, + HttpServletRequest request) + throws SQLException, AuthorizeException, IOException { + + Context context = ContextUtil.obtainContext(request); + Collection collection = collectionService.find(context, uuid); + if (collection == null) { + throw new ResourceNotFoundException("No such collection: " + uuid); + } + AuthorizeUtil.authorizeManageDefaultReadGroup(context, collection); + List itemGroups = authorizeService.getAuthorizedGroups(context, collection, Constants.DEFAULT_ITEM_READ); + if (itemGroups != null && !itemGroups.isEmpty()) { + Group itemReadGroup = itemGroups.get(0); + if (itemReadGroup == null || StringUtils.equalsIgnoreCase(itemReadGroup.getName(), Group.ANONYMOUS)) { + throw new UnprocessableEntityException( + "Unable to delete the default read group because it's the default"); + } + } else { + throw new UnprocessableEntityException("The collection with UUID: " + uuid + " doesn't have " + + "an ItemReadGroup group"); + + } + collectionRestRepository.deleteItemReadGroup(context, collection); + context.complete(); + return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); + } + + /** + * This method creates and returns a BitstreamReadGroup object for the given collection + * This is called by using RequestMethod.POST on the /bitstreamReadGroup value + * @param uuid The UUID of the collection for which we'll create a BitstreamReadGroup + * @param response The current response + * @param request The current request + * @return The created BitstreamReadGroup + * @throws SQLException If something goes wrong + * @throws AuthorizeException If something goes wrong + */ + @RequestMapping(method = RequestMethod.POST, value = "/bitstreamReadGroup") + @PreAuthorize("hasPermission(#uuid, 'COLLECTION', 'WRITE')") + public ResponseEntity> postBitstreamReadGroup(@PathVariable UUID uuid, + HttpServletResponse response, + HttpServletRequest request) + throws SQLException, AuthorizeException { + + Context context = ContextUtil.obtainContext(request); + Collection collection = collectionService.find(context, uuid); + + if (collection == null) { + throw new ResourceNotFoundException("No such collection: " + uuid); + } + AuthorizeUtil.authorizeManageDefaultReadGroup(context, collection); + List bitstreamGroups = authorizeService + .getAuthorizedGroups(context, collection, Constants.DEFAULT_BITSTREAM_READ); + if (bitstreamGroups != null && !bitstreamGroups.isEmpty()) { + Group bitstreamGroup = bitstreamGroups.get(0); + if (bitstreamGroup != null && !StringUtils.equalsIgnoreCase(bitstreamGroup.getName(), Group.ANONYMOUS)) { + throw new UnprocessableEntityException( + "Unable to create a new default read group because either the group already exists or multiple " + + "groups are assigned the default privileges."); + } + } + + + GroupRest bitstreamReadGroup = collectionRestRepository.createBitstreamReadGroup(context, request, collection); + context.complete(); + GroupResource groupResource = converterService.toResource(bitstreamReadGroup); + return ControllerUtils.toResponseEntity(HttpStatus.CREATED, new HttpHeaders(), groupResource); + } + + /** + * This method 
takes care of the deletion of an BitstreamReadGroup for the given collection + * This is called by using RequestMethod.DELETE on the /bitstreamReadGroup value + * @param uuid The UUID of the collection for which we'll delete the bitstreamReadGroup + * @param response The current response + * @param request The current request + * @return An empty response if the deletion was successful + * @throws SQLException If something goes wrong + * @throws AuthorizeException If something goes wrong + * @throws IOException If something goes wrong + */ + @RequestMapping(method = RequestMethod.DELETE, value = "/bitstreamReadGroup") + @PreAuthorize("hasPermission(#uuid, 'COLLECTION', 'WRITE')") + public ResponseEntity> deleteBitstreamReadGroup(@PathVariable UUID uuid, + HttpServletResponse response, + HttpServletRequest request) + throws SQLException, AuthorizeException, IOException { + + Context context = ContextUtil.obtainContext(request); + Collection collection = collectionService.find(context, uuid); + if (collection == null) { + throw new ResourceNotFoundException("No such collection: " + uuid); + } + AuthorizeUtil.authorizeManageDefaultReadGroup(context, collection); + List bitstreamGroups = authorizeService + .getAuthorizedGroups(context, collection, Constants.DEFAULT_BITSTREAM_READ); + if (bitstreamGroups != null && !bitstreamGroups.isEmpty()) { + Group bitstreamReadGroup = bitstreamGroups.get(0); + if (bitstreamReadGroup == null || StringUtils + .equalsIgnoreCase(bitstreamReadGroup.getName(), Group.ANONYMOUS)) { + throw new UnprocessableEntityException( + "Unable to delete the default read group because it's the default"); + } + } else { + throw new UnprocessableEntityException("The collection with UUID: " + uuid + " doesn't have " + + "an BitstreamReadGroup group"); + + } + collectionRestRepository.deleteBitstreamReadGroup(context, collection); + context.complete(); + return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); + } + + /** + * This method will retrieve the workflowGroup for a given Collection and workflowRole + * @param uuid The UUID of the collection to retrieve + * @param response The current response + * @param request The current request + * @param workflowRole The given workflowRole + * @return The workflowGroup for the given collection and workflowrole + * @throws Exception If something goes wrong + */ + @RequestMapping(method = RequestMethod.GET, value = "/workflowGroups/{workflowRole}") + @PreAuthorize("hasPermission(#uuid, 'COLLECTION', 'READ')") + public ResponseEntity> getWorkflowGroupForRole(@PathVariable UUID uuid, + HttpServletResponse response, + HttpServletRequest request, + @PathVariable String workflowRole) + throws Exception { + Context context = ContextUtil.obtainContext(request); + Collection collection = collectionService.find(context, uuid); + if (collection == null) { + throw new ResourceNotFoundException("No such collection: " + uuid); + } + AuthorizeUtil.authorizeManageWorkflowsGroup(context, collection); + GroupRest groupRest = collectionRestRepository.getWorkflowGroupForRole(context, collection, workflowRole); + if (groupRest == null) { + return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); + } + GroupResource groupResource = converterService.toResource(groupRest); + return ControllerUtils.toResponseEntity(HttpStatus.OK, new HttpHeaders(), groupResource); + } + + /** + * This method will create the workflowGroup for a given Collection and workflowRole + * @param uuid The UUID of the collection to retrieve + * @param response The current 
response + * @param request The current request + * @param workflowRole The given workflowRole + * @return The workflowGroup for the given collection and workflowrole + * @throws Exception If something goes wrong + */ + @RequestMapping(method = RequestMethod.POST, value = "/workflowGroups/{workflowRole}") + @PreAuthorize("hasPermission(#uuid, 'COLLECTION', 'READ')") + public ResponseEntity> postWorkflowGroupForRole(@PathVariable UUID uuid, + HttpServletResponse response, + HttpServletRequest request, + @PathVariable String workflowRole) + throws Exception { + Context context = ContextUtil.obtainContext(request); + Collection collection = collectionService.find(context, uuid); + if (collection == null) { + throw new ResourceNotFoundException("No such collection: " + uuid); + } + AuthorizeUtil.authorizeManageWorkflowsGroup(context, collection); + if (WorkflowUtils.getCollectionAndRepositoryRoles(collection).get(workflowRole) == null) { + throw new ResourceNotFoundException("Couldn't find role for: " + workflowRole + + " in the collection with UUID: " + collection.getID()); + } + Group group = workflowService.getWorkflowRoleGroup(context, collection, workflowRole, null); + if (group != null) { + throw new UnprocessableEntityException("WorkflowGroup already exists for the role: " + workflowRole + + " in collection with UUID: " + collection.getID()); + } + GroupRest groupRest = collectionRestRepository + .createWorkflowGroupForRole(context, request, collection, workflowRole); + context.complete(); + GroupResource groupResource = converterService.toResource(groupRest); + return ControllerUtils.toResponseEntity(HttpStatus.CREATED, new HttpHeaders(), groupResource); + + } + + /** + * This method will delete the workflowGroup for a given Collection and workflowRole + * @param uuid The UUID of the collection to retrieve + * @param response The current response + * @param request The current request + * @param workflowRole The given workflowRole + * @return + * @throws Exception If something goes wrong + */ + @RequestMapping(method = RequestMethod.DELETE, value = "/workflowGroups/{workflowRole}") + @PreAuthorize("hasPermission(#uuid, 'COLLECTION', 'READ')") + public ResponseEntity> deleteWorkflowGroupForRole(@PathVariable UUID uuid, + HttpServletResponse response, + HttpServletRequest request, + @PathVariable String workflowRole) + throws Exception { + Context context = ContextUtil.obtainContext(request); + Collection collection = collectionService.find(context, uuid); + if (collection == null) { + throw new ResourceNotFoundException("No such collection: " + uuid); + } + AuthorizeUtil.authorizeManageWorkflowsGroup(context, collection); + collectionRestRepository.deleteWorkflowGroupForRole(context, request, collection, workflowRole); + context.complete(); + return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/CollectionItemtemplateController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/CollectionItemTemplateController.java similarity index 92% rename from dspace-server-webapp/src/main/java/org/dspace/app/rest/CollectionItemtemplateController.java rename to dspace-server-webapp/src/main/java/org/dspace/app/rest/CollectionItemTemplateController.java index 8b87df6b06..6a0890fabc 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/CollectionItemtemplateController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/CollectionItemTemplateController.java @@ -32,7 +32,7 @@ import 
org.dspace.core.Context; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.rest.webmvc.ControllerUtils; import org.springframework.data.rest.webmvc.ResourceNotFoundException; -import org.springframework.hateoas.ResourceSupport; +import org.springframework.hateoas.RepresentationModel; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; @@ -50,7 +50,7 @@ import org.springframework.web.bind.annotation.RestController; @RestController @RequestMapping("/api/" + CollectionRest.CATEGORY + "/" + CollectionRest.PLURAL_NAME + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID + "/itemtemplate") -public class CollectionItemtemplateController { +public class CollectionItemTemplateController { @Autowired private Utils utils; @@ -100,9 +100,9 @@ public class CollectionItemtemplateController { */ @PreAuthorize("hasPermission(#uuid, 'COLLECTION', 'WRITE')") @RequestMapping(method = RequestMethod.POST) - public ResponseEntity createTemplateItem(HttpServletRequest request, - @PathVariable UUID uuid, - @RequestBody(required = false) JsonNode itemBody) + public ResponseEntity> createTemplateItem(HttpServletRequest request, + @PathVariable UUID uuid, + @RequestBody(required = false) JsonNode itemBody) throws SQLException, AuthorizeException { if (itemBody == null) { @@ -125,7 +125,7 @@ public class CollectionItemtemplateController { context.commit(); return ControllerUtils.toResponseEntity(HttpStatus.CREATED, new HttpHeaders(), - converter.toResource(templateItem)); + (RepresentationModel) converter.toResource(templateItem)); } /** diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/CollectionLogoController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/CollectionLogoController.java index c0b78cdec0..c3243d8887 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/CollectionLogoController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/CollectionLogoController.java @@ -26,7 +26,7 @@ import org.dspace.core.Context; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.rest.webmvc.ControllerUtils; import org.springframework.data.rest.webmvc.ResourceNotFoundException; -import org.springframework.hateoas.ResourceSupport; +import org.springframework.hateoas.RepresentationModel; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; @@ -89,8 +89,10 @@ public class CollectionLogoController { @PreAuthorize("hasPermission(#uuid, 'COLLECTION', 'WRITE')") @RequestMapping(method = RequestMethod.POST, headers = "content-type=multipart/form-data") - public ResponseEntity createLogo(HttpServletRequest request, @PathVariable UUID uuid, - @RequestParam(value = "file", required = false) MultipartFile uploadfile) + public ResponseEntity> createLogo( + HttpServletRequest request, + @PathVariable UUID uuid, + @RequestParam(value = "file", required = false) MultipartFile uploadfile) throws SQLException, IOException, AuthorizeException { if (uploadfile == null) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/CommunityAdminGroupRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/CommunityAdminGroupRestController.java new file mode 100644 index 0000000000..2265ac941e --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/CommunityAdminGroupRestController.java @@ -0,0 
+1,129 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.UUID; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.dspace.app.rest.converter.ConverterService; +import org.dspace.app.rest.exception.UnprocessableEntityException; +import org.dspace.app.rest.model.GroupRest; +import org.dspace.app.rest.model.hateoas.GroupResource; +import org.dspace.app.rest.repository.CommunityRestRepository; +import org.dspace.app.rest.utils.ContextUtil; +import org.dspace.app.util.AuthorizeUtil; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Community; +import org.dspace.content.service.CommunityService; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.rest.webmvc.ControllerUtils; +import org.springframework.data.rest.webmvc.ResourceNotFoundException; +import org.springframework.hateoas.RepresentationModel; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +/** + * This RestController will take care of all the calls for a specific community's admingroup + * This is handled by calling "/api/core/communities/{uuid}/adminGroup" with the correct RequestMethod + */ +@RestController +@RequestMapping("/api/core/communities" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID + "/adminGroup") +public class CommunityAdminGroupRestController { + + @Autowired + private CommunityService communityService; + + @Autowired + private CommunityRestRepository communityRestRepository; + + @Autowired + private ConverterService converterService; + + + @Autowired + private AuthorizeService authorizeService; + + /** + * This method creates and returns an AdminGroup object for the given community + * This is called by using RequestMethod.POST on the default url for this class + * @param uuid The UUID of the community for which we'll create an adminGroup + * @param response The current response + * @param request The current request + * @return The created AdminGroup + * @throws SQLException If something goes wrong + * @throws AuthorizeException If something goes wrong + */ + @RequestMapping(method = RequestMethod.POST) + @PreAuthorize("hasPermission(#uuid, 'COMMUNITY', 'WRITE')") + public ResponseEntity> postAdminGroup(@PathVariable UUID uuid, HttpServletResponse response, + HttpServletRequest request) + throws SQLException, AuthorizeException { + + Context context = ContextUtil.obtainContext(request); + Community community = communityService.find(context, uuid); + + if (community == null) { + throw new ResourceNotFoundException("No such community: " + uuid); + } + AuthorizeUtil.authorizeManageAdminGroup(context, community); + if 
(community.getAdministrators() != null) { + throw new UnprocessableEntityException("The community with UUID: " + uuid + " already has " + + "an admin group"); + } + GroupRest adminGroup = communityRestRepository.createAdminGroup(context, request, community); + context.complete(); + GroupResource groupResource = converterService.toResource(adminGroup); + return ControllerUtils.toResponseEntity(HttpStatus.CREATED, new HttpHeaders(), groupResource); + } + + /** + * This method takes care of the deletion of an AdminGroup for the given community + * This is called by using RequestMethod.DELETE on the default url for this class + * @param uuid The UUID of the community for which we'll delete the AdminGroup + * @param response The current response + * @param request The current request + * @return An empty response if the deletion was successful + * @throws SQLException If something goes wrong + * @throws AuthorizeException If something goes wrong + * @throws IOException If something goes wrong + */ + @RequestMapping(method = RequestMethod.DELETE) + @PreAuthorize("hasPermission(#uuid, 'COMMUNITY', 'WRITE')") + public ResponseEntity> deleteAdminGroup(@PathVariable UUID uuid, + HttpServletResponse response, + HttpServletRequest request) + throws SQLException, AuthorizeException, IOException { + + Context context = ContextUtil.obtainContext(request); + Community community = communityService.find(context, uuid); + if (community == null) { + throw new ResourceNotFoundException("No such community: " + uuid); + } + AuthorizeUtil.authorizeManageAdminGroup(context, community); + if (community.getAdministrators() == null) { + throw new UnprocessableEntityException("The community with UUID: " + uuid + " doesn't have an admin " + + "group"); + } + communityRestRepository.deleteAdminGroup(context, community); + context.complete(); + return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/CommunityLogoController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/CommunityLogoController.java index baf8009006..52c0f000b6 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/CommunityLogoController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/CommunityLogoController.java @@ -26,7 +26,7 @@ import org.dspace.core.Context; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.rest.webmvc.ControllerUtils; import org.springframework.data.rest.webmvc.ResourceNotFoundException; -import org.springframework.hateoas.ResourceSupport; +import org.springframework.hateoas.RepresentationModel; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; @@ -89,8 +89,9 @@ public class CommunityLogoController { @PreAuthorize("hasPermission(#uuid, 'COMMUNITY', 'WRITE')") @RequestMapping(method = RequestMethod.POST, headers = "content-type=multipart/form-data") - public ResponseEntity createLogo(HttpServletRequest request, @PathVariable UUID uuid, - @RequestParam(value = "file", required = false) MultipartFile uploadfile) + public ResponseEntity> createLogo(HttpServletRequest request, @PathVariable UUID uuid, + @RequestParam(value = "file", required = false) + MultipartFile uploadfile) throws SQLException, IOException, AuthorizeException { if (uploadfile == null) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/DiscoverableEndpointsService.java 
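A minimal sketch (illustrative only) of the behaviour the new community adminGroup endpoint enforces. The /api/core/communities/{uuid}/adminGroup mapping comes from the controller above; the assumption that UnprocessableEntityException surfaces as HTTP 422 and the mockMvc/adminToken fixtures are illustrative:

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.util.UUID;
import org.springframework.test.web.servlet.MockMvc;

// Creating the admin group twice for the same community: the first call succeeds,
// the second is rejected because the community already has an administrators group.
void createAdminGroupTwice(MockMvc mockMvc, UUID communityUuid, String adminToken) throws Exception {
    String url = "/api/core/communities/" + communityUuid + "/adminGroup";
    mockMvc.perform(post(url).header("Authorization", adminToken))
           .andExpect(status().isCreated());
    mockMvc.perform(post(url).header("Authorization", adminToken))
           .andExpect(status().isUnprocessableEntity());
}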
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/DiscoverableEndpointsService.java index 6e8c674d43..1853285d71 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/DiscoverableEndpointsService.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/DiscoverableEndpointsService.java @@ -68,10 +68,11 @@ public class DiscoverableEndpointsService { discoverableEndpoints.add(link); // sanity check // FIXME improve logging for debugging - if (rels.contains(link.getRel())) { - throw new IllegalStateException("The rel " + link.getRel() + " is defined multiple times!"); + if (rels.contains(link.getRel().value())) { + throw new IllegalStateException("The rel " + link.getRel().value() + + " is defined multiple times!"); } - rels.add(link.getRel()); + rels.add(link.getRel().value()); } } } @@ -87,4 +88,4 @@ public class DiscoverableEndpointsService { // could be used to override default implementation) return true; } -} \ No newline at end of file +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/DiscoveryRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/DiscoveryRestController.java index 6be17ae24c..d167d2a84d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/DiscoveryRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/DiscoveryRestController.java @@ -7,6 +7,8 @@ */ package org.dspace.app.rest; +import static org.apache.commons.collections4.ListUtils.emptyIfNull; + import java.util.Arrays; import java.util.List; import java.util.Objects; @@ -34,7 +36,7 @@ import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Pageable; import org.springframework.hateoas.Link; -import org.springframework.hateoas.ResourceSupport; +import org.springframework.hateoas.RepresentationModel; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; @@ -100,51 +102,55 @@ public class DiscoveryRestController implements InitializingBean { @RequestMapping(method = RequestMethod.GET, value = "/search/facets") public FacetsResource getFacets(@RequestParam(name = "query", required = false) String query, - @RequestParam(name = "dsoType", required = false) String dsoType, + @RequestParam(name = "dsoType", required = false) List dsoTypes, @RequestParam(name = "scope", required = false) String dsoScope, @RequestParam(name = "configuration", required = false) String configuration, List searchFilters, Pageable page) throws Exception { + dsoTypes = emptyIfNull(dsoTypes); + if (log.isTraceEnabled()) { log.trace("Searching with scope: " + StringUtils.trimToEmpty(dsoScope) - + ", configuration name: " + StringUtils.trimToEmpty(configuration) - + ", dsoType: " + StringUtils.trimToEmpty(dsoType) - + ", query: " + StringUtils.trimToEmpty(query) - + ", filters: " + Objects.toString(searchFilters)); + + ", configuration name: " + StringUtils.trimToEmpty(configuration) + + ", dsoTypes: " + String.join(", ", dsoTypes) + + ", query: " + StringUtils.trimToEmpty(query) + + ", filters: " + Objects.toString(searchFilters)); } SearchResultsRest searchResultsRest = discoveryRestRepository - .getAllFacets(query, dsoType, dsoScope, configuration, searchFilters); + .getAllFacets(query, dsoTypes, dsoScope, configuration, searchFilters); FacetsResource facetsResource = new 
FacetsResource(searchResultsRest, page); halLinkService.addLinks(facetsResource, page); return facetsResource; - - } @RequestMapping(method = RequestMethod.GET, value = "/search/objects") public SearchResultsResource getSearchObjects(@RequestParam(name = "query", required = false) String query, - @RequestParam(name = "dsoType", required = false) String dsoType, + @RequestParam(name = "dsoType", required = false) + List dsoTypes, @RequestParam(name = "scope", required = false) String dsoScope, @RequestParam(name = "configuration", required = false) String configuration, List searchFilters, Pageable page) throws Exception { + + dsoTypes = emptyIfNull(dsoTypes); + if (log.isTraceEnabled()) { log.trace("Searching with scope: " + StringUtils.trimToEmpty(dsoScope) - + ", configuration name: " + StringUtils.trimToEmpty(configuration) - + ", dsoType: " + StringUtils.trimToEmpty(dsoType) - + ", query: " + StringUtils.trimToEmpty(query) - + ", filters: " + Objects.toString(searchFilters) - + ", page: " + Objects.toString(page)); + + ", configuration name: " + StringUtils.trimToEmpty(configuration) + + ", dsoTypes: " + String.join(", ", dsoTypes) + + ", query: " + StringUtils.trimToEmpty(query) + + ", filters: " + Objects.toString(searchFilters) + + ", page: " + Objects.toString(page)); } //Get the Search results in JSON format SearchResultsRest searchResultsRest = discoveryRestRepository - .getSearchObjects(query, dsoType, dsoScope, configuration, searchFilters, page, utils.obtainProjection()); + .getSearchObjects(query, dsoTypes, dsoScope, configuration, searchFilters, page, utils.obtainProjection()); //Convert the Search JSON results to paginated HAL resources SearchResultsResource searchResultsResource = new SearchResultsResource(searchResultsRest, utils, page); @@ -171,18 +177,21 @@ public class DiscoveryRestController implements InitializingBean { } @RequestMapping(method = RequestMethod.GET, value = "/facets/{name}") - public ResourceSupport getFacetValues(@PathVariable("name") String facetName, - @RequestParam(name = "prefix", required = false) String prefix, - @RequestParam(name = "query", required = false) String query, - @RequestParam(name = "dsoType", required = false) String dsoType, - @RequestParam(name = "scope", required = false) String dsoScope, - @RequestParam(name = "configuration", required = false) String - configuration, - List searchFilters, - Pageable page) throws Exception { + public RepresentationModel getFacetValues(@PathVariable("name") String facetName, + @RequestParam(name = "prefix", required = false) String prefix, + @RequestParam(name = "query", required = false) String query, + @RequestParam(name = "dsoType", required = false) List dsoTypes, + @RequestParam(name = "scope", required = false) String dsoScope, + @RequestParam(name = "configuration", required = false) String + configuration, + List searchFilters, + Pageable page) throws Exception { + + dsoTypes = emptyIfNull(dsoTypes); + if (log.isTraceEnabled()) { log.trace("Facetting on facet " + facetName + " with scope: " + StringUtils.trimToEmpty(dsoScope) - + ", dsoType: " + StringUtils.trimToEmpty(dsoType) + + ", dsoTypes: " + String.join(", ", dsoTypes) + ", prefix: " + StringUtils.trimToEmpty(prefix) + ", query: " + StringUtils.trimToEmpty(query) + ", filters: " + Objects.toString(searchFilters) @@ -190,7 +199,7 @@ public class DiscoveryRestController implements InitializingBean { } FacetResultsRest facetResultsRest = discoveryRestRepository - .getFacetObjects(facetName, prefix, query, dsoType, dsoScope, 
configuration, searchFilters, page); + .getFacetObjects(facetName, prefix, query, dsoTypes, dsoScope, configuration, searchFilters, page); FacetResultsResource facetResultsResource = converter.toResource(facetResultsRest); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ExternalSourcesRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ExternalSourcesRestController.java index db016218a9..357ea409c0 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ExternalSourcesRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ExternalSourcesRestController.java @@ -16,7 +16,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.web.PagedResourcesAssembler; -import org.springframework.hateoas.PagedResources; +import org.springframework.hateoas.PagedModel; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; @@ -54,7 +54,7 @@ public class ExternalSourcesRestController { * @return A paginated list of ExternalSourceEntryResource objects that comply with the params */ @RequestMapping(method = RequestMethod.GET, value = "/entries") - public PagedResources getExternalSourceEntries( + public PagedModel getExternalSourceEntries( @PathVariable("externalSourceName") String externalSourceName, @RequestParam(name = "query") String query, @RequestParam(name = "parent", required = false) String parent, @@ -65,7 +65,7 @@ public class ExternalSourcesRestController { Page externalSourceEntryResources = externalSourceEntryRestPage .map(externalSourceEntryRest -> new ExternalSourceEntryResource(externalSourceEntryRest)); externalSourceEntryResources.forEach(linkService::addLinks); - PagedResources result = assembler.toResource(externalSourceEntryResources); + PagedModel result = assembler.toModel(externalSourceEntryResources); return result; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/GroupRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/GroupRestController.java index db545736f1..c8d0c2eb66 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/GroupRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/GroupRestController.java @@ -12,9 +12,6 @@ import static org.apache.http.HttpStatus.SC_NO_CONTENT; import static org.apache.http.HttpStatus.SC_UNPROCESSABLE_ENTITY; import static org.dspace.app.rest.utils.ContextUtil.obtainContext; import static org.dspace.app.rest.utils.RegexUtils.REGEX_UUID; -import static org.dspace.app.util.AuthorizeUtil.authorizeManageAdminGroup; -import static org.dspace.app.util.AuthorizeUtil.authorizeManageSubmittersGroup; -import static org.dspace.app.util.AuthorizeUtil.authorizeManageWorkflowsGroup; import static org.springframework.web.bind.annotation.RequestMethod.DELETE; import static org.springframework.web.bind.annotation.RequestMethod.POST; @@ -31,19 +28,14 @@ import javax.servlet.http.HttpServletResponse; import org.dspace.app.rest.exception.UnprocessableEntityException; import org.dspace.app.rest.model.GroupRest; -import org.dspace.app.rest.utils.GroupUtil; import org.dspace.app.rest.utils.Utils; +import org.dspace.app.util.AuthorizeUtil; import org.dspace.authorize.AuthorizeException; -import org.dspace.authorize.service.AuthorizeService; 
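With dsoType now bound to a List, clients may repeat the parameter (e.g. /api/discover/search/objects?dsoType=Item&dsoType=Collection), and ListUtils.emptyIfNull only guards against the parameter being absent. A minimal, self-contained illustration of that guard:

import static org.apache.commons.collections4.ListUtils.emptyIfNull;
import java.util.Arrays;
import java.util.List;

// emptyIfNull never returns null: a missing request parameter becomes an empty list,
// while a supplied list is passed through unchanged.
List<String> missing = emptyIfNull(null);                               // -> []
List<String> given = emptyIfNull(Arrays.asList("Item", "Collection"));  // -> [Item, Collection]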
-import org.dspace.content.Collection; -import org.dspace.content.Community; import org.dspace.core.Context; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.GroupService; -import org.dspace.xmlworkflow.storedcomponents.CollectionRole; -import org.dspace.xmlworkflow.storedcomponents.service.CollectionRoleService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.security.access.prepost.PreAuthorize; @@ -64,18 +56,9 @@ public class GroupRestController { @Autowired private EPersonService ePersonService; - @Autowired - private AuthorizeService authorizeService; - - @Autowired - private CollectionRoleService collectionRoleService; - @Autowired Utils utils; - @Autowired - GroupUtil groupUtil; - /** * Method to add one or more subgroups to a group. * The subgroups to be added should be provided in the request body as a uri-list. @@ -96,7 +79,7 @@ public class GroupRestController { throw new ResourceNotFoundException("parent group is not found for uuid: " + uuid); } - checkAuthorization(context, parentGroup); + AuthorizeUtil.authorizeManageGroup(context, parentGroup); List groupLinks = utils.getStringListFromRequest(request); @@ -156,7 +139,7 @@ public class GroupRestController { throw new ResourceNotFoundException("parent group is not found for uuid: " + uuid); } - checkAuthorization(context, parentGroup); + AuthorizeUtil.authorizeManageGroup(context, parentGroup); List memberLinks = utils.getStringListFromRequest(request); @@ -212,7 +195,7 @@ public class GroupRestController { throw new ResourceNotFoundException("parent group is not found for uuid: " + parentUUID); } - checkAuthorization(context, parentGroup); + AuthorizeUtil.authorizeManageGroup(context, parentGroup); Group childGroup = groupService.find(context, childUUID); if (childGroup == null) { @@ -247,7 +230,7 @@ public class GroupRestController { throw new ResourceNotFoundException("parent group is not found for uuid: " + parentUUID); } - checkAuthorization(context, parentGroup); + AuthorizeUtil.authorizeManageGroup(context, parentGroup); EPerson childGroup = ePersonService.find(context, memberUUID); if (childGroup == null) { @@ -260,52 +243,4 @@ public class GroupRestController { response.setStatus(SC_NO_CONTENT); } - - /** - * This method checks whether the current user has sufficient rights to modify the group. - * Depending on the kind of group and due to delegated administration, separate checks need to be done to verify - * whether the user is allowed to modify the group. 
- * - * @param context the context of which the user will be checked - * @param group the group to be checked - * @throws SQLException - * @throws AuthorizeException - */ - private void checkAuthorization(Context context, Group group) throws SQLException, AuthorizeException { - - if (authorizeService.isAdmin(context)) { - return; - } - - Collection collection = groupUtil.getCollection(context, group); - if (collection != null) { - - if (group.equals(collection.getSubmitters())) { - authorizeManageSubmittersGroup(context, collection); - return; - } - - - List collectionRoles = collectionRoleService.findByCollection(context, collection); - for (CollectionRole role : collectionRoles) { - if (group.equals(role.getGroup())) { - authorizeManageWorkflowsGroup(context, collection); - return; - } - } - - if (group.equals(collection.getAdministrators())) { - authorizeManageAdminGroup(context, collection); - return; - } - } - - Community community = groupUtil.getCommunity(context, group); - if (community != null) { - authorizeManageAdminGroup(context, community); - return; - } - - throw new AuthorizeException("not authorized to manage this group"); - } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/IdentifierRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/IdentifierRestController.java index 09b6468b3c..dadf2c514b 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/IdentifierRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/IdentifierRestController.java @@ -2,12 +2,12 @@ * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at - * + * * http://www.dspace.org/license/ */ package org.dspace.app.rest; -import static org.springframework.hateoas.mvc.ControllerLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; import java.io.IOException; import java.net.URI; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemAddBundleController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemAddBundleController.java index 12e8e057f9..5175dec5e2 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemAddBundleController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemAddBundleController.java @@ -33,7 +33,7 @@ import org.dspace.core.Context; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.rest.webmvc.ControllerUtils; import org.springframework.data.rest.webmvc.ResourceNotFoundException; -import org.springframework.hateoas.ResourceSupport; +import org.springframework.hateoas.RepresentationModel; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; @@ -87,9 +87,9 @@ public class ItemAddBundleController { */ @RequestMapping(method = RequestMethod.POST) @PreAuthorize("hasPermission(#uuid, 'ITEM', 'ADD')") - public ResponseEntity addBundleToItem(@PathVariable UUID uuid, - HttpServletRequest request, - HttpServletResponse response) + public ResponseEntity> addBundleToItem(@PathVariable UUID uuid, + HttpServletRequest request, + HttpServletResponse response) throws SQLException, AuthorizeException { Context context = ContextUtil.obtainContext(request); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemtemplateRestController.java 
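The group-management endpoints above consume a text/uri-list body, with the per-case checks formerly done in checkAuthorization now delegated to AuthorizeUtil.authorizeManageGroup. A hedged sketch of adding a subgroup via MockMvc, where the /api/eperson/groups/{uuid}/subgroups path, the 204 response code, and the fixtures are assumptions for illustration:

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.util.UUID;
import org.springframework.test.web.servlet.MockMvc;

// Add one child group to a parent group by posting its REST URI as a text/uri-list body.
void addSubgroup(MockMvc mockMvc, UUID parentUuid, UUID childUuid, String adminToken) throws Exception {
    mockMvc.perform(post("/api/eperson/groups/" + parentUuid + "/subgroups")
               .header("Authorization", adminToken)
               .contentType("text/uri-list")
               .content("http://localhost/api/eperson/groups/" + childUuid))
           .andExpect(status().isNoContent());
}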
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemTemplateRestController.java similarity index 91% rename from dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemtemplateRestController.java rename to dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemTemplateRestController.java index fb77967b15..e297dab44c 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemtemplateRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemTemplateRestController.java @@ -32,7 +32,7 @@ import org.dspace.core.Context; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.rest.webmvc.ControllerUtils; import org.springframework.data.rest.webmvc.ResourceNotFoundException; -import org.springframework.hateoas.ResourceSupport; +import org.springframework.hateoas.RepresentationModel; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; @@ -49,7 +49,7 @@ import org.springframework.web.bind.annotation.RestController; */ @RestController @RequestMapping("/api/core/itemtemplates" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID) -public class ItemtemplateRestController { +public class ItemTemplateRestController { @Autowired private Utils utils; @@ -122,8 +122,8 @@ public class ItemtemplateRestController { */ @PreAuthorize("hasPermission(#uuid, 'ITEM', 'WRITE')") @RequestMapping(method = RequestMethod.PATCH) - public ResponseEntity patch(HttpServletRequest request, @PathVariable UUID uuid, - @RequestBody(required = true) JsonNode jsonNode) + public ResponseEntity> patch(HttpServletRequest request, @PathVariable UUID uuid, + @RequestBody(required = true) JsonNode jsonNode) throws SQLException, AuthorizeException { Context context = ContextUtil.obtainContext(request); @@ -132,7 +132,7 @@ public class ItemtemplateRestController { context.commit(); return ControllerUtils.toResponseEntity(HttpStatus.OK, new HttpHeaders(), - converter.toResource(templateItemRest)); + (RepresentationModel) converter.toResource(templateItemRest)); } /** @@ -155,7 +155,8 @@ public class ItemtemplateRestController { */ @PreAuthorize("hasPermission(#uuid, 'ITEM', 'DELETE')") @RequestMapping(method = RequestMethod.DELETE) - public ResponseEntity deleteTemplateItem(HttpServletRequest request, @PathVariable UUID uuid) + public ResponseEntity> deleteTemplateItem(HttpServletRequest request, + @PathVariable UUID uuid) throws SQLException, AuthorizeException, IOException { Context context = ContextUtil.obtainContext(request); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java index 42ad173f2e..62c6a9c573 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java @@ -34,6 +34,7 @@ import org.dspace.content.service.CollectionService; import org.dspace.content.service.CommunityService; import org.dspace.core.Context; import org.dspace.core.LogManager; +import org.dspace.core.Utils; import org.dspace.discovery.DiscoverQuery; import org.dspace.discovery.DiscoverResult; import org.dspace.discovery.IndexableObject; @@ -103,7 +104,8 @@ public class OpenSearchController { // do some sanity checking if (!openSearchService.getFormats().contains(format)) { - String err = "Format " + format + " is not supported."; + // Since we are 
returning error response as HTML, escape any HTML in "format" param + String err = "Format " + Utils.addEntities(format) + " is not supported."; response.setContentType("text/html"); response.setContentLength(err.length()); response.getWriter().write(err); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/RelationshipTypeRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/RelationshipTypeRestController.java index a3d3f0fb32..46aefbe69e 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/RelationshipTypeRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/RelationshipTypeRestController.java @@ -27,7 +27,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.web.PagedResourcesAssembler; -import org.springframework.hateoas.PagedResources; +import org.springframework.hateoas.PagedModel; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; @@ -71,7 +71,7 @@ public class RelationshipTypeRestController { * @throws SQLException If something goes wrong */ @RequestMapping(method = RequestMethod.GET) - public PagedResources retrieve(@PathVariable Integer id, + public PagedModel retrieve(@PathVariable Integer id, HttpServletResponse response, HttpServletRequest request, Pageable pageable, @@ -81,12 +81,12 @@ public class RelationshipTypeRestController { List list = relationshipTypeService.findByEntityType(context, entityType, -1, -1); Page relationshipTypeRestPage = converter - .toRestPage(list, pageable, list.size(), utils.obtainProjection()); + .toRestPage(list, pageable, utils.obtainProjection()); Page relationshipTypeResources = relationshipTypeRestPage .map(relationshipTypeRest -> new RelationshipTypeResource(relationshipTypeRest, utils)); relationshipTypeResources.forEach(halLinkService::addLinks); - PagedResources result = assembler.toResource(relationshipTypeResources); + PagedModel result = assembler.toModel(relationshipTypeResources); return result; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java index ab338af966..9e14df2ec3 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java @@ -10,8 +10,8 @@ package org.dspace.app.rest; import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT; import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_STRING_VERSION_STRONG; import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID; -import static org.springframework.hateoas.mvc.ControllerLinkBuilder.linkTo; -import static org.springframework.hateoas.mvc.ControllerLinkBuilder.methodOn; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; import java.io.FileNotFoundException; import java.io.IOException; @@ -63,15 +63,13 @@ import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Sort; import 
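Escaping the echoed format parameter closes a reflected-XSS vector in the OpenSearch HTML error response. A short illustration of the intent, assuming org.dspace.core.Utils.addEntities performs conventional HTML entity encoding (the exact output shown is an assumption):

// A hostile value of the "format" request parameter...
String format = "<script>alert(1)</script>";
// ...is encoded before being written into the text/html error body,
// so it renders as inert text rather than executing in the browser.
String safe = org.dspace.core.Utils.addEntities(format);
// safe is expected to look like: &lt;script&gt;alert(1)&lt;/script&gt;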
org.springframework.data.rest.webmvc.ControllerUtils; -import org.springframework.data.rest.webmvc.PersistentEntityResourceAssembler; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.data.web.PagedResourcesAssembler; +import org.springframework.hateoas.CollectionModel; +import org.springframework.hateoas.EntityModel; import org.springframework.hateoas.Link; -import org.springframework.hateoas.PagedResources; -import org.springframework.hateoas.Resource; -import org.springframework.hateoas.ResourceSupport; -import org.springframework.hateoas.Resources; -import org.springframework.hateoas.UriTemplate; +import org.springframework.hateoas.PagedModel; +import org.springframework.hateoas.RepresentationModel; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; @@ -121,7 +119,7 @@ public class RestResourceController implements InitializingBean { @Override public void afterPropertiesSet() { - List links = new ArrayList(); + List links = new ArrayList<>(); for (String r : utils.getRepositories()) { // this doesn't work as we don't have an active http request // see https://github.com/spring-projects/spring-hateoas/issues/408 @@ -130,7 +128,7 @@ public class RestResourceController implements InitializingBean { String plural = English.plural(split[1]); Link l = new Link("/api/" + split[0] + "/" + plural, plural); links.add(l); - System.out.println(l.getRel() + " " + l.getHref()); + log.debug(l.getRel().value() + " " + l.getHref()); } discoverableEndpointsService.register(this, links); } @@ -145,14 +143,13 @@ public class RestResourceController implements InitializingBean { * identifier * and see {@link RestResourceController#findOne(String, String, UUID)} for uuid as identifier * - * @param apiCategory - * @param model - * @param id - * @return + * @param apiCategory category from request + * @param model model from request + * @param id Identifier from request + * @return single DSpaceResource */ @RequestMapping(method = RequestMethod.GET, value = REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT) - @SuppressWarnings("unchecked") - public DSpaceResource findOne(@PathVariable String apiCategory, @PathVariable String model, + public HALResource findOne(@PathVariable String apiCategory, @PathVariable String model, @PathVariable Integer id) { return findOneInternal(apiCategory, model, id); } @@ -177,14 +174,13 @@ public class RestResourceController implements InitializingBean { * identifier * and see {@link RestResourceController#findOne(String, String, UUID)} for uuid as identifier * - * @param apiCategory - * @param model - * @param id - * @return + * @param apiCategory category from request + * @param model model from request + * @param id Identifier from request + * @return single DSpaceResource */ @RequestMapping(method = RequestMethod.GET, value = REGEX_REQUESTMAPPING_IDENTIFIER_AS_STRING_VERSION_STRONG) - @SuppressWarnings("unchecked") - public DSpaceResource findOne(@PathVariable String apiCategory, @PathVariable String model, + public HALResource findOne(@PathVariable String apiCategory, @PathVariable String model, @PathVariable String id) { return findOneInternal(apiCategory, model, id); } @@ -198,14 +194,13 @@ public class RestResourceController implements InitializingBean { * identifier * and see {@link RestResourceController#findOne(String, String, String)} for string as identifier * - * @param apiCategory - * @param model - * @param uuid - * @return + * @param 
apiCategory category from request + * @param model model from request + * @param uuid Identifier from request + * @return single DSpaceResource */ @RequestMapping(method = RequestMethod.GET, value = REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID) - @SuppressWarnings("unchecked") - public DSpaceResource findOne(@PathVariable String apiCategory, @PathVariable String model, + public HALResource findOne(@PathVariable String apiCategory, @PathVariable String model, @PathVariable UUID uuid) { return findOneInternal(apiCategory, model, uuid); } @@ -213,12 +208,12 @@ public class RestResourceController implements InitializingBean { /** * Internal method to retrieve single resource from an identifier of generic type * - * @param apiCategory - * @param model - * @param id - * @return + * @param apiCategory category from request + * @param model model from request + * @param id Identifier from request + * @return single DSpaceResource */ - private DSpaceResource findOneInternal(String apiCategory, + private HALResource findOneInternal(String apiCategory, String model, ID id) { DSpaceRestRepository repository = utils.getResourceRepository(apiCategory, model); Optional modelObject = Optional.empty(); @@ -238,17 +233,17 @@ public class RestResourceController implements InitializingBean { * * Note that the regular expression in the request mapping accept a number; * - * @param request - * @param apiCategory - * @param model - * @param id - * @param rel - * @param page - * @param assembler - * @return + * @param request current HTTPServletRequest + * @param apiCategory category from request + * @param model model from request + * @param id identifier from request + * @param rel relation from request + * @param page pagination information + * @param assembler PagedResourcesAssembler + * @return single RepresentationModel */ @RequestMapping(method = RequestMethod.GET, value = REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT + "/{rel}") - public ResourceSupport findRel(HttpServletRequest request, HttpServletResponse response, + public RepresentationModel findRel(HttpServletRequest request, HttpServletResponse response, @PathVariable String apiCategory, @PathVariable String model, @PathVariable Integer id, @PathVariable String rel, Pageable page, @@ -262,18 +257,19 @@ public class RestResourceController implements InitializingBean { * Note that the regular expression in the request mapping accept a string as identifier but not the other kind * of identifier; * - * @param request - * @param apiCategory - * @param model - * @param id - * @param rel - * @param page - * @param assembler - * @return + * @param request current HTTPServletRequest + * @param response HTTPServletResponse + * @param apiCategory category from request + * @param model model from request + * @param id identifier from request + * @param rel relation from request + * @param page pagination information + * @param assembler PagedResourcesAssembler + * @return single RepresentationModel */ @RequestMapping(method = RequestMethod.GET, value = REGEX_REQUESTMAPPING_IDENTIFIER_AS_STRING_VERSION_STRONG + "/{rel}") - public ResourceSupport findRel(HttpServletRequest request, HttpServletResponse response, + public RepresentationModel findRel(HttpServletRequest request, HttpServletResponse response, @PathVariable String apiCategory, @PathVariable String model, @PathVariable String id, @PathVariable String rel, Pageable page, @@ -296,7 +292,7 @@ public class RestResourceController implements InitializingBean { * @return */ @RequestMapping(method = RequestMethod.GET, 
value = REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID + "/{rel}") - public ResourceSupport findRel(HttpServletRequest request, HttpServletResponse response, + public RepresentationModel findRel(HttpServletRequest request, HttpServletResponse response, @PathVariable String apiCategory, @PathVariable String model, @PathVariable UUID uuid, @PathVariable String rel, Pageable page, @@ -338,7 +334,7 @@ public class RestResourceController implements InitializingBean { */ @RequestMapping(method = RequestMethod.GET, value = REGEX_REQUESTMAPPING_IDENTIFIER_AS_STRING_VERSION_STRONG + "/{rel}/{relid}") - public ResourceSupport findRel(HttpServletRequest request, HttpServletResponse response, + public RepresentationModel findRel(HttpServletRequest request, HttpServletResponse response, @PathVariable String apiCategory, @PathVariable String model, @PathVariable String id, @PathVariable String rel, @PathVariable String relid, @@ -346,7 +342,15 @@ public class RestResourceController implements InitializingBean { return findRelEntryInternal(request, response, apiCategory, model, id, rel, relid, page, assembler); } - + @RequestMapping(method = RequestMethod.GET, value = REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT + + "/{rel}/{relid}") + public RepresentationModel findRel(HttpServletRequest request, HttpServletResponse response, + @PathVariable String apiCategory, + @PathVariable String model, @PathVariable Integer id, @PathVariable String rel, + @PathVariable String relid, + Pageable page, PagedResourcesAssembler assembler) throws Throwable { + return findRelEntryInternal(request, response, apiCategory, model, id.toString(), rel, relid, page, assembler); + } /** * Execute a POST request; * @@ -367,10 +371,10 @@ public class RestResourceController implements InitializingBean { * @throws HttpRequestMethodNotSupportedException If something goes wrong */ @RequestMapping(method = RequestMethod.POST, consumes = {"application/json", "application/hal+json"}) - public ResponseEntity post(HttpServletRequest request, - @PathVariable String apiCategory, - @PathVariable String model, - @RequestParam(required = false) String parent) + public ResponseEntity> post(HttpServletRequest request, + @PathVariable String apiCategory, + @PathVariable String model, + @RequestParam(required = false) String parent) throws HttpRequestMethodNotSupportedException { return postJsonInternal(request, apiCategory, model, parent); } @@ -394,9 +398,9 @@ public class RestResourceController implements InitializingBean { * @throws HttpRequestMethodNotSupportedException If something goes wrong */ @RequestMapping(method = RequestMethod.POST, consumes = {"text/uri-list"}) - public ResponseEntity postWithUriListContentType(HttpServletRequest request, - @PathVariable String apiCategory, - @PathVariable String model) + public ResponseEntity> postWithUriListContentType(HttpServletRequest request, + @PathVariable String apiCategory, + @PathVariable String model) throws HttpRequestMethodNotSupportedException { return postUriListInternal(request, apiCategory, model); } @@ -411,9 +415,10 @@ public class RestResourceController implements InitializingBean { * @return The relevant ResponseEntity for this request * @throws HttpRequestMethodNotSupportedException If something goes wrong */ - public ResponseEntity postJsonInternal(HttpServletRequest request, - String apiCategory, - String model, String parent) + public ResponseEntity> postJsonInternal(HttpServletRequest request, + String apiCategory, + String model, + String parent) throws 
HttpRequestMethodNotSupportedException { checkModelPluralForm(apiCategory, model); DSpaceRestRepository repository = utils.getResourceRepository(apiCategory, model); @@ -442,9 +447,10 @@ public class RestResourceController implements InitializingBean { * @return The relevant ResponseEntity for this request * @throws HttpRequestMethodNotSupportedException If something goes wrong */ - public ResponseEntity postUriListInternal(HttpServletRequest request, - String apiCategory, - String model) + public ResponseEntity> postUriListInternal( + HttpServletRequest request, + String apiCategory, + String model) throws HttpRequestMethodNotSupportedException { checkModelPluralForm(apiCategory, model); DSpaceRestRepository repository = utils.getResourceRepository(apiCategory, model); @@ -482,8 +488,8 @@ public class RestResourceController implements InitializingBean { */ @RequestMapping(method = RequestMethod.POST, value = REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT, headers = "content-type=application/x-www-form-urlencoded") - public ResponseEntity action(HttpServletRequest request, @PathVariable String apiCategory, - @PathVariable String model, @PathVariable Integer id) + public ResponseEntity> action(HttpServletRequest request, @PathVariable String apiCategory, + @PathVariable String model, @PathVariable Integer id) throws HttpRequestMethodNotSupportedException, SQLException, IOException { checkModelPluralForm(apiCategory, model); DSpaceRestRepository repository = @@ -525,11 +531,11 @@ public class RestResourceController implements InitializingBean { */ @RequestMapping(method = RequestMethod.POST, value = REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT, headers = "content-type=multipart/form-data") - public ResponseEntity upload(HttpServletRequest request, - @PathVariable String apiCategory, - @PathVariable String model, - @PathVariable Integer id, - @RequestParam("file") MultipartFile + public ResponseEntity> upload(HttpServletRequest request, + @PathVariable String apiCategory, + @PathVariable String model, + @PathVariable Integer id, + @RequestParam("file") MultipartFile uploadfile) throws HttpRequestMethodNotSupportedException { return uploadInternal(request, apiCategory, model, id, uploadfile); @@ -555,11 +561,11 @@ public class RestResourceController implements InitializingBean { */ @RequestMapping(method = RequestMethod.POST, value = REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID, headers = "content-type=multipart/form-data") - public ResponseEntity upload(HttpServletRequest request, - @PathVariable String apiCategory, - @PathVariable String model, - @PathVariable UUID uuid, - @RequestParam("file") MultipartFile + public ResponseEntity> upload(HttpServletRequest request, + @PathVariable String apiCategory, + @PathVariable String model, + @PathVariable UUID uuid, + @RequestParam("file") MultipartFile uploadfile) throws HttpRequestMethodNotSupportedException { return uploadInternal(request, apiCategory, model, uuid, uploadfile); @@ -575,10 +581,11 @@ public class RestResourceController implements InitializingBean { * @param uploadfile * @return */ - private ResponseEntity uploadInternal(HttpServletRequest request, - String apiCategory, String model, - ID id, - MultipartFile uploadfile) { + private ResponseEntity> uploadInternal(HttpServletRequest request, + String apiCategory, + String model, + ID id, + MultipartFile uploadfile) { checkModelPluralForm(apiCategory, model); DSpaceRestRepository repository = utils.getResourceRepository(apiCategory, model); RestAddressableModel modelObject = null; @@ -613,11 +620,11 
@@ public class RestResourceController implements InitializingBean { * @throws AuthorizeException */ @RequestMapping(method = { RequestMethod.POST }, headers = "content-type=multipart/form-data") - public ResponseEntity upload(HttpServletRequest request, - @PathVariable String apiCategory, - @PathVariable String model, - @RequestParam("file") MultipartFile - uploadfile) + public ResponseEntity> upload( + HttpServletRequest request, + @PathVariable String apiCategory, + @PathVariable String model, + @RequestParam("file") List uploadfile) throws SQLException, FileNotFoundException, IOException, AuthorizeException { checkModelPluralForm(apiCategory, model); @@ -630,7 +637,7 @@ public class RestResourceController implements InitializingBean { DSpaceResource result = converter.toResource(modelObject); resources.add(result); } - return ControllerUtils.toResponseEntity(HttpStatus.OK, new HttpHeaders(), Resources.wrap(resources)); + return ControllerUtils.toResponseEntity(HttpStatus.OK, new HttpHeaders(), CollectionModel.wrap(resources)); } /** @@ -648,9 +655,9 @@ public class RestResourceController implements InitializingBean { * @throws HttpRequestMethodNotSupportedException */ @RequestMapping(method = RequestMethod.PATCH, value = REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT) - public ResponseEntity patch(HttpServletRequest request, @PathVariable String apiCategory, - @PathVariable String model, @PathVariable Integer id, - @RequestBody(required = true) JsonNode jsonNode) { + public ResponseEntity> patch(HttpServletRequest request, @PathVariable String apiCategory, + @PathVariable String model, @PathVariable Integer id, + @RequestBody(required = true) JsonNode jsonNode) { return patchInternal(request, apiCategory, model, id, jsonNode); } @@ -669,10 +676,10 @@ public class RestResourceController implements InitializingBean { * @throws HttpRequestMethodNotSupportedException */ @RequestMapping(method = RequestMethod.PATCH, value = REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID) - public ResponseEntity patch(HttpServletRequest request, @PathVariable String apiCategory, - @PathVariable String model, - @PathVariable(name = "uuid") UUID id, - @RequestBody(required = true) JsonNode jsonNode) { + public ResponseEntity> patch(HttpServletRequest request, @PathVariable String apiCategory, + @PathVariable String model, + @PathVariable(name = "uuid") UUID id, + @RequestBody(required = true) JsonNode jsonNode) { return patchInternal(request, apiCategory, model, id, jsonNode); } @@ -687,10 +694,10 @@ public class RestResourceController implements InitializingBean { * @return * @throws HttpRequestMethodNotSupportedException */ - public ResponseEntity patchInternal(HttpServletRequest request, - String apiCategory, - String model, ID id, - JsonNode jsonNode) { + public ResponseEntity> patchInternal(HttpServletRequest request, + String apiCategory, + String model, ID id, + JsonNode jsonNode) { checkModelPluralForm(apiCategory, model); DSpaceRestRepository repository = utils.getResourceRepository(apiCategory, model); RestAddressableModel modelObject = null; @@ -722,7 +729,7 @@ public class RestResourceController implements InitializingBean { * @param assembler * @return */ - private ResourceSupport findRelEntryInternal(HttpServletRequest request, + private RepresentationModel findRelEntryInternal(HttpServletRequest request, HttpServletResponse response, String apiCategory, String model, String id, String rel, String relid, @@ -745,7 +752,7 @@ public class RestResourceController implements InitializingBean { result.add(object); 
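The changes throughout this controller track the Spring HATEOAS 1.x renames: ResourceSupport becomes RepresentationModel, Resource becomes EntityModel, Resources becomes CollectionModel, PagedResources becomes PagedModel, and PagedResourcesAssembler.toResource(...) becomes toModel(...). A minimal before/after sketch, with the page of HAL resources and the self link assumed:

import org.springframework.data.domain.Page;
import org.springframework.data.web.PagedResourcesAssembler;
import org.springframework.hateoas.Link;
import org.springframework.hateoas.PagedModel;

// Spring HATEOAS 0.x (before this patch):
//   PagedResources<?> result = assembler.toResource(halResources, link);
// Spring HATEOAS 1.x (after this patch):
PagedModel<?> wrap(PagedResourcesAssembler<Object> assembler, Page<Object> halResources, Link link) {
    return assembler.toModel(halResources, link);
}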
PageImpl pageResult = new PageImpl(result, page, 1); Page halResources = pageResult.map(restObject -> converter.toResource(restObject)); - return assembler.toResource(halResources, link); + return assembler.toModel(halResources, link); } catch (InvocationTargetException e) { // This catch has been made to resolve the issue that caused AuthorizeDenied exceptions for the methods // on the repository defined by the @PreAuthorize etc annotation to be absorbed by the reflection's @@ -775,7 +782,7 @@ public class RestResourceController implements InitializingBean { * @param assembler * @return */ - private ResourceSupport findRelInternal(HttpServletRequest request, + private RepresentationModel findRelInternal(HttpServletRequest request, HttpServletResponse response, String apiCategory, String model, ID uuid, String subpath, Pageable page, @@ -785,14 +792,14 @@ public class RestResourceController implements InitializingBean { Class domainClass = repository.getDomainClass(); LinkRest linkRest = utils.getClassLevelLinkRest(subpath, domainClass); - PagedResources result; + PagedModel result; if (linkRest != null) { LinkRestRepository linkRepository = utils.getLinkResourceRepository(apiCategory, model, linkRest.name()); Method linkMethod = utils.requireMethod(linkRepository.getClass(), linkRest.method()); try { if (Page.class.isAssignableFrom(linkMethod.getReturnType())) { - Page pageResult = (Page) linkMethod + Page pageResult = (Page) linkMethod .invoke(linkRepository, request, uuid, page, utils.obtainProjection()); if (pageResult == null) { @@ -813,11 +820,11 @@ public class RestResourceController implements InitializingBean { link = linkTo(this.getClass(), apiCategory, model).slash(uuid).slash(subpath).withSelfRel(); } - return new Resource(new EmbeddedPage(link.getHref(), + return new EntityModel(new EmbeddedPage(link.getHref(), pageResult.map(converter::toResource), null, subpath)); } else { - RestModel object = (RestModel) linkMethod.invoke(linkRepository, request, uuid, page, - utils.obtainProjection()); + RestModel object = (RestModel) linkMethod.invoke(linkRepository, request, + uuid, page, utils.obtainProjection()); if (object == null) { response.setStatus(HttpServletResponse.SC_NO_CONTENT); return null; @@ -839,7 +846,7 @@ public class RestResourceController implements InitializingBean { throw new RuntimeException(e); } } - RestAddressableModel modelObject = repository.findById(uuid).orElse(null); + RestModel modelObject = repository.findById(uuid).orElse(null); if (modelObject == null) { throw new ResourceNotFoundException(apiCategory + "." 
+ model + " with id: " + uuid + " not found"); @@ -852,10 +859,10 @@ public class RestResourceController implements InitializingBean { for (Link l : resource.getLinks()) { if (l.isTemplated()) { if (l.getHref().substring(0, l.getHref().indexOf("?")).contentEquals(request.getRequestURL())) { - rel = l.getRel(); + rel = l.getRel().value(); } } else if (l.getHref().contentEquals(request.getRequestURL())) { - rel = l.getRel(); + rel = l.getRel().value(); } } @@ -877,7 +884,7 @@ public class RestResourceController implements InitializingBean { List fullList = ep.getFullList(); if (fullList == null || fullList.size() == 0) { PageImpl pageResult = new PageImpl(fullList, page, 0); - result = assembler.toResource(pageResult); + result = assembler.toModel(pageResult); return result; } int start = Math.toIntExact(page.getOffset()); @@ -886,12 +893,12 @@ public class RestResourceController implements InitializingBean { .getResourceRepository(fullList.get(0).getCategory(), fullList.get(0).getType()); PageImpl pageResult = new PageImpl(fullList.subList(start, end), page, fullList.size()); - return assembler.toResource(pageResult.map(converter::toResource)); + return assembler.toModel(pageResult.map(converter::toResource)); } else { if (resource.getEmbeddedResources().get(rel) == null) { response.setStatus(HttpServletResponse.SC_NO_CONTENT); } - return (ResourceSupport) resource.getEmbeddedResources().get(rel); + return (RepresentationModel) resource.getEmbeddedResources().get(rel); } } @@ -907,7 +914,7 @@ public class RestResourceController implements InitializingBean { */ @RequestMapping(method = RequestMethod.GET) @SuppressWarnings("unchecked") - public PagedResources> findAll(@PathVariable String apiCategory, + public PagedModel> findAll(@PathVariable String apiCategory, @PathVariable String model, Pageable page, PagedResourcesAssembler assembler, @@ -923,7 +930,7 @@ public class RestResourceController implements InitializingBean { } catch (PaginationException pe) { resources = new PageImpl<>(new ArrayList<>(), page, pe.getTotal()); } - PagedResources> result = assembler.toResource(resources, link); + PagedModel> result = assembler.toModel(resources, link); if (repositoryUtils.haveSearchMethods(repository)) { result.add(linkTo(this.getClass(), apiCategory, model).slash("search").withRel("search")); } @@ -942,8 +949,8 @@ public class RestResourceController implements InitializingBean { } @RequestMapping(method = RequestMethod.GET, value = "/search") - public ResourceSupport listSearchMethods(@PathVariable String apiCategory, @PathVariable String model) { - ResourceSupport root = new ResourceSupport(); + public RepresentationModel listSearchMethods(@PathVariable String apiCategory, @PathVariable String model) { + RepresentationModel root = new RepresentationModel(); DSpaceRestRepository repository = utils.getResourceRepository(apiCategory, model); List searchMethods = repositoryUtils.listSearchMethods(repository); @@ -961,14 +968,14 @@ public class RestResourceController implements InitializingBean { @RequestMapping(method = RequestMethod.GET, value = "/search/{searchMethodName}") @SuppressWarnings("unchecked") - public ResourceSupport executeSearchMethods(@PathVariable String apiCategory, - @PathVariable String model, - @PathVariable String searchMethodName, - HttpServletResponse response, - Pageable pageable, Sort sort, - PagedResourcesAssembler assembler, - @RequestParam MultiValueMap parameters) + public RepresentationModel executeSearchMethods( + @PathVariable String apiCategory, + @PathVariable 
String model, + @PathVariable String searchMethodName, + HttpServletResponse response, + Pageable pageable, Sort sort, + PagedResourcesAssembler assembler, + @RequestParam MultiValueMap parameters) throws IllegalAccessException, IllegalArgumentException, InvocationTargetException { Link link = linkTo(this.getClass(), apiCategory, model).slash("search").slash(searchMethodName).withSelfRel(); @@ -990,7 +997,7 @@ public class RestResourceController implements InitializingBean { .executeQueryMethod(repository, parameters, searchMethod, pageable, sort, assembler); returnPage = searchMethod.getReturnType().isAssignableFrom(Page.class); - ResourceSupport result = null; + RepresentationModel result = null; if (returnPage) { Page> resources; if (searchResult == null) { @@ -998,7 +1005,7 @@ public class RestResourceController implements InitializingBean { } else { resources = ((Page) searchResult).map(converter::toResource); } - result = assembler.toResource(resources, link); + result = assembler.toModel(resources, link); } else { if (searchResult == null) { response.setStatus(HttpServletResponse.SC_NO_CONTENT); @@ -1009,30 +1016,16 @@ public class RestResourceController implements InitializingBean { return result; } - /** - * Sets the location header pointing to the resource representing the given instance. Will make sure we properly - * expand the URI template potentially created as self link. - * - * @param headers must not be {@literal null}. - * @param assembler must not be {@literal null}. - * @param source must not be {@literal null}. - */ - private void addLocationHeader(HttpHeaders headers, PersistentEntityResourceAssembler assembler, Object source) { - - String selfLink = assembler.getSelfLinkFor(source).getHref(); - headers.setLocation(new UriTemplate(selfLink).expand()); - } - @RequestMapping(method = RequestMethod.DELETE, value = REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT) - public ResponseEntity delete(HttpServletRequest request, @PathVariable String apiCategory, - @PathVariable String model, @PathVariable Integer id) + public ResponseEntity> delete(HttpServletRequest request, @PathVariable String apiCategory, + @PathVariable String model, @PathVariable Integer id) throws HttpRequestMethodNotSupportedException { return deleteInternal(apiCategory, model, id); } @RequestMapping(method = RequestMethod.DELETE, value = REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID) - public ResponseEntity delete(HttpServletRequest request, @PathVariable String apiCategory, - @PathVariable String model, @PathVariable UUID uuid) + public ResponseEntity> delete(HttpServletRequest request, @PathVariable String apiCategory, + @PathVariable String model, @PathVariable UUID uuid) throws HttpRequestMethodNotSupportedException { return deleteInternal(apiCategory, model, uuid); } @@ -1045,8 +1038,9 @@ public class RestResourceController implements InitializingBean { * @param id * @return */ - private ResponseEntity deleteInternal(String apiCategory, String model, - ID id) { + private ResponseEntity> deleteInternal(String apiCategory, + String model, + ID id) { checkModelPluralForm(apiCategory, model); DSpaceRestRepository repository = utils.getResourceRepository(apiCategory, model); repository.deleteById(id); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java index 43788dfd1f..5cc956c5b1 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java +++ 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java @@ -7,6 +7,8 @@ */ package org.dspace.app.rest; +import java.util.List; + import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.app.rest.converter.ConverterService; @@ -14,9 +16,12 @@ import org.dspace.app.rest.model.ProcessRest; import org.dspace.app.rest.model.ScriptRest; import org.dspace.app.rest.model.hateoas.ProcessResource; import org.dspace.app.rest.repository.ScriptRestRepository; +import org.dspace.app.rest.utils.ContextUtil; +import org.dspace.core.Context; +import org.dspace.services.RequestService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.rest.webmvc.ControllerUtils; -import org.springframework.hateoas.ResourceSupport; +import org.springframework.hateoas.RepresentationModel; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; @@ -24,7 +29,9 @@ import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; +import org.springframework.web.multipart.MultipartFile; /** * This controller adds additional subresource methods to allow connecting scripts with processes @@ -41,6 +48,9 @@ public class ScriptProcessesController { @Autowired private ScriptRestRepository scriptRestRepository; + @Autowired + private RequestService requestService; + /** * This method can be called by sending a POST request to the system/scripts/{name}/processes endpoint * This will start a process for the script that matches the given name @@ -50,13 +60,16 @@ public class ScriptProcessesController { */ @RequestMapping(method = RequestMethod.POST) @PreAuthorize("hasAuthority('ADMIN')") - public ResponseEntity startProcess(@PathVariable(name = "name") String scriptName) + public ResponseEntity> startProcess(@PathVariable(name = "name") String scriptName, + @RequestParam(name = "file") List files) throws Exception { if (log.isTraceEnabled()) { log.trace("Starting Process for Script with name: " + scriptName); } - ProcessRest processRest = scriptRestRepository.startProcess(scriptName); + Context context = ContextUtil.obtainContext(requestService.getCurrentRequest().getServletRequest()); + ProcessRest processRest = scriptRestRepository.startProcess(context, scriptName, files); ProcessResource processResource = converter.toResource(processRest); + context.complete(); return ControllerUtils.toResponseEntity(HttpStatus.ACCEPTED, new HttpHeaders(), processResource); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ShibbolethRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ShibbolethRestController.java index 7355bab2a8..159170f8b2 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ShibbolethRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ShibbolethRestController.java @@ -8,10 +8,13 @@ package org.dspace.app.rest; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import javax.servlet.http.HttpServletResponse; +import org.apache.commons.lang3.StringUtils; import org.dspace.app.rest.model.AuthnRest; +import 
org.dspace.core.Utils; import org.dspace.services.ConfigurationService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -47,14 +50,35 @@ public class ShibbolethRestController implements InitializingBean { .register(this, Arrays.asList(new Link("/api/" + AuthnRest.CATEGORY, "shibboleth"))); } + // LGTM.com thinks this method has an unvalidated URL redirect (https://lgtm.com/rules/4840088/) in `redirectUrl`, + // even though we are clearly validating the hostname of `redirectUrl` and test it in ShibbolethRestControllerIT + @SuppressWarnings("lgtm[java/unvalidated-url-redirection]") @RequestMapping(method = RequestMethod.GET) public void shibboleth(HttpServletResponse response, @RequestParam(name = "redirectUrl", required = false) String redirectUrl) throws IOException { if (redirectUrl == null) { redirectUrl = configurationService.getProperty("dspace.ui.url"); } - log.info("Redirecting to " + redirectUrl); - response.sendRedirect(redirectUrl); + + // Validate that the redirectURL matches either the server or UI hostname. It *cannot* be an arbitrary URL. + String redirectHostName = Utils.getHostName(redirectUrl); + String serverHostName = Utils.getHostName(configurationService.getProperty("dspace.server.url")); + ArrayList allowedHostNames = new ArrayList(); + allowedHostNames.add(serverHostName); + String[] allowedUrls = configurationService.getArrayProperty("rest.cors.allowed-origins"); + for (String url : allowedUrls) { + allowedHostNames.add(Utils.getHostName(url)); + } + + if (StringUtils.equalsAnyIgnoreCase(redirectHostName, allowedHostNames.toArray(new String[0]))) { + log.debug("Shibboleth redirecting to " + redirectUrl); + response.sendRedirect(redirectUrl); + } else { + log.error("Invalid Shibboleth redirectURL=" + redirectUrl + + ". URL doesn't match hostname of server or UI!"); + response.sendError(HttpServletResponse.SC_BAD_REQUEST, + "Invalid redirectURL! 
Must match server or ui hostname."); + } } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/SitemapRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/SitemapRestController.java new file mode 100644 index 0000000000..4eef1ba34b --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/SitemapRestController.java @@ -0,0 +1,148 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.sql.SQLException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.catalina.connector.ClientAbortException; +import org.apache.logging.log4j.Logger; +import org.dspace.app.rest.utils.ContextUtil; +import org.dspace.app.rest.utils.MultipartFileSender; +import org.dspace.core.Context; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.rest.webmvc.ResourceNotFoundException; +import org.springframework.stereotype.Controller; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; + +/** + * This is a specialized controller to provide access to the sitemap files, generated by + * {@link org.dspace.app.sitemap.GenerateSitemaps} + * + * The mapping for requested endpoint try to resolve a valid sitemap file name, for example + *
    + * {@code
    + * https://<dspace.server.url>/sitemaps/26453b4d-e513-44e8-8d5b-395f62972eff/sitemap0.html
    + * }
    + * 
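    + * Requested file names are resolved against the directory configured by the {@code sitemap.dir} property, and the matching file is streamed back via {@link org.dspace.app.rest.utils.MultipartFileSender}.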
    + * + * @author Maria Verdonck (Atmire) on 08/07/2020 + */ +@Controller +@RequestMapping("/${sitemap.path:sitemaps}") +public class SitemapRestController { + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SitemapRestController.class); + + @Autowired + ConfigurationService configurationService; + + // Most file systems are configured to use block sizes of 4096 or 8192 and our buffer should be a multiple of that. + private static final int BUFFER_SIZE = 4096 * 10; + + /** + * Tries to retrieve a matching sitemap file in configured location + * + * @param name the name of the requested sitemap file + * @param response the HTTP response + * @param request the HTTP request + * @throws SQLException if db error while completing DSpace context + * @throws IOException if IO error surrounding sitemap file + */ + @GetMapping("/{name}") + public void retrieve(@PathVariable String name, HttpServletResponse response, + HttpServletRequest request) throws IOException, SQLException { + // Find sitemap with given name in dspace/sitemaps + File foundSitemapFile = null; + File sitemapOutputDir = new File(configurationService.getProperty("sitemap.dir")); + if (sitemapOutputDir.exists() && sitemapOutputDir.isDirectory()) { + // List of all files and directories inside sitemapOutputDir + File sitemapFilesList[] = sitemapOutputDir.listFiles(); + for (File sitemapFile : sitemapFilesList) { + if (name.equalsIgnoreCase(sitemapFile.getName())) { + if (sitemapFile.isFile()) { + foundSitemapFile = sitemapFile; + } else { + throw new ResourceNotFoundException( + "Directory with name " + name + " in " + sitemapOutputDir.getAbsolutePath() + + " found, but no file."); + } + } + } + } else { + throw new ResourceNotFoundException( + "Sitemap directory in " + sitemapOutputDir.getAbsolutePath() + " does not " + + "exist, either sitemaps have not been generated (./dspace generate-sitemaps)," + + " or are located elsewhere (config used: sitemap.dir)."); + } + if (foundSitemapFile == null) { + throw new ResourceNotFoundException( + "Could not find sitemap file with name " + name + " in " + sitemapOutputDir.getAbsolutePath()); + } else { + // return found sitemap file + this.returnSitemapFile(foundSitemapFile, response, request); + } + } + + /** + * Sends back the matching sitemap file as a MultipartFile, with the headers set with details of the file + * (content, size, name, last modified) + * + * @param foundSitemapFile the found sitemap file, with matching name as in request path + * @param response the HTTP response + * @param request the HTTP request + * @throws SQLException if db error while completing DSpace context + * @throws IOException if IO error surrounding sitemap file + */ + private void returnSitemapFile(File foundSitemapFile, HttpServletResponse response, HttpServletRequest request) + throws SQLException, IOException { + // Pipe the bits + try (InputStream is = new FileInputStream(foundSitemapFile)) { + MultipartFileSender sender = MultipartFileSender + .fromInputStream(is) + .withBufferSize(BUFFER_SIZE) + .withFileName(foundSitemapFile.getName()) + .withLength(foundSitemapFile.length()) + .withMimetype(Files.probeContentType(foundSitemapFile.toPath())) + .with(request) + .with(response); + + sender.withLastModified(foundSitemapFile.lastModified()); + + // Determine if we need to send the file as a download or if the browser can open it inline + long dispositionThreshold = configurationService.getLongProperty("webui.content_disposition_threshold"); + if (dispositionThreshold 
>= 0 && foundSitemapFile.length() > dispositionThreshold) { + sender.withDisposition(MultipartFileSender.CONTENT_DISPOSITION_ATTACHMENT); + } + + Context context = ContextUtil.obtainContext(request); + + // We have all the data we need, close the connection to the database so that it doesn't stay open during + // download/streaming + context.complete(); + + // Send the data + if (sender.isValid()) { + sender.serveResource(); + } + + } catch (ClientAbortException e) { + log.debug("Client aborted the request before the download was completed. " + + "Client is probably switching to a Range request.", e); + } + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/StatisticsRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/StatisticsRestController.java index 9422ef32ae..77cae6f596 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/StatisticsRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/StatisticsRestController.java @@ -12,7 +12,6 @@ import java.util.UUID; import org.dspace.app.rest.converter.ConverterService; import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException; -import org.dspace.app.rest.link.HalLinkService; import org.dspace.app.rest.model.RestAddressableModel; import org.dspace.app.rest.model.StatisticsSupportRest; import org.dspace.app.rest.model.hateoas.SearchEventResource; @@ -26,8 +25,8 @@ import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.rest.webmvc.ControllerUtils; import org.springframework.hateoas.Link; -import org.springframework.hateoas.PagedResources; -import org.springframework.hateoas.ResourceSupport; +import org.springframework.hateoas.PagedModel; +import org.springframework.hateoas.RepresentationModel; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; @@ -46,9 +45,6 @@ public class StatisticsRestController implements InitializingBean { @Autowired private DiscoverableEndpointsService discoverableEndpointsService; - @Autowired - private HalLinkService halLinkService; - @Autowired private ConverterService converter; @@ -75,33 +71,33 @@ public class StatisticsRestController implements InitializingBean { } @RequestMapping(method = RequestMethod.GET, value = "/viewevents/{uuid}") - public PagedResources getViewEvent(@PathVariable(name = "uuid") UUID uuid) throws Exception { + public PagedModel getViewEvent(@PathVariable(name = "uuid") UUID uuid) throws Exception { throw new RepositoryMethodNotImplementedException("No implementation found; Method not allowed!", ""); } @RequestMapping(method = RequestMethod.GET, value = "/searchevents/{uuid}") - public PagedResources getSearchEvent(@PathVariable(name = "uuid") UUID uuid) throws Exception { + public PagedModel getSearchEvent(@PathVariable(name = "uuid") UUID uuid) throws Exception { throw new RepositoryMethodNotImplementedException("No implementation found; Method not allowed!", ""); } @RequestMapping(method = RequestMethod.GET, value = "/viewevents") - public PagedResources getViewEvents() throws Exception { + public PagedModel getViewEvents() throws Exception { throw new RepositoryMethodNotImplementedException("No implementation found; Method not allowed!", ""); } @RequestMapping(method = RequestMethod.GET, value = "/searchevents") - public PagedResources getSearchEvents() throws Exception { + public PagedModel 
getSearchEvents() throws Exception { throw new RepositoryMethodNotImplementedException("No implementation found; Method not allowed!", ""); } @RequestMapping(method = RequestMethod.POST, value = "/viewevents") - public ResponseEntity postViewEvent() throws Exception { + public ResponseEntity> postViewEvent() throws Exception { ViewEventResource result = converter.toResource(viewEventRestRepository.createViewEvent()); return ControllerUtils.toResponseEntity(HttpStatus.CREATED, new HttpHeaders(), result); } @RequestMapping(method = RequestMethod.POST, value = "/searchevents") - public ResponseEntity postSearchEvent() throws Exception { + public ResponseEntity> postSearchEvent() throws Exception { SearchEventResource result = converter.toResource(searchEventRestRepository.createSearchEvent()); return ControllerUtils.toResponseEntity(HttpStatus.CREATED, new HttpHeaders(), result); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/SubmissionCCLicenseUrlRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/SubmissionCCLicenseUrlRepository.java new file mode 100644 index 0000000000..957484319c --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/SubmissionCCLicenseUrlRepository.java @@ -0,0 +1,140 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import javax.servlet.ServletRequest; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.app.rest.converter.ConverterService; +import org.dspace.app.rest.exception.DSpaceBadRequestException; +import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException; +import org.dspace.app.rest.model.SubmissionCCLicenseUrlRest; +import org.dspace.app.rest.model.wrapper.SubmissionCCLicenseUrl; +import org.dspace.app.rest.repository.DSpaceRestRepository; +import org.dspace.app.rest.utils.Utils; +import org.dspace.core.Context; +import org.dspace.license.service.CreativeCommonsService; +import org.dspace.services.RequestService; +import org.dspace.utils.DSpace; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.rest.webmvc.ResourceNotFoundException; +import org.springframework.hateoas.Link; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.stereotype.Component; + +/** + * This Repository is responsible for handling the CC License URIs. + * It only supports a search method + */ + +@Component(SubmissionCCLicenseUrlRest.CATEGORY + "." 
+ SubmissionCCLicenseUrlRest.NAME) +public class SubmissionCCLicenseUrlRepository extends DSpaceRestRepository + implements InitializingBean { + + @Autowired + protected Utils utils; + + @Autowired + protected CreativeCommonsService creativeCommonsService; + + @Autowired + protected ConverterService converter; + + protected RequestService requestService = new DSpace().getRequestService(); + + @Autowired + DiscoverableEndpointsService discoverableEndpointsService; + + /** + * Retrieves the CC License URI based on the license ID and answers in the field questions, provided as parameters + * to this request + * + * @return the CC License URI as a SubmissionCCLicenseUrlRest + */ + @PreAuthorize("hasAuthority('AUTHENTICATED')") + @SearchRestMethod(name = "rightsByQuestions") + public SubmissionCCLicenseUrlRest findByRightsByQuestions() { + ServletRequest servletRequest = requestService.getCurrentRequest() + .getServletRequest(); + Map requestParameterMap = servletRequest + .getParameterMap(); + Map parameterMap = new HashMap<>(); + String licenseId = servletRequest.getParameter("license"); + if (StringUtils.isBlank(licenseId)) { + throw new DSpaceBadRequestException( + "A \"license\" parameter needs to be provided."); + } + + // Loop through parameters to find answer parameters, adding them to the parameterMap. Zero or more answers + // may exist, as some CC licenses do not require answers + for (String parameter : requestParameterMap.keySet()) { + if (StringUtils.startsWith(parameter, "answer_")) { + String field = StringUtils.substringAfter(parameter, "answer_"); + String answer = ""; + if (requestParameterMap.get(parameter).length > 0) { + answer = requestParameterMap.get(parameter)[0]; + } + parameterMap.put(field, answer); + } + } + + Map fullParamMap = creativeCommonsService.retrieveFullAnswerMap(licenseId, parameterMap); + if (fullParamMap == null) { + throw new ResourceNotFoundException("No CC License could be matched on the provided ID: " + licenseId); + } + boolean licenseContainsCorrectInfo = creativeCommonsService.verifyLicenseInformation(licenseId, fullParamMap); + if (!licenseContainsCorrectInfo) { + throw new DSpaceBadRequestException( + "The provided answers do not match the required fields for the provided license."); + } + + String licenseUri = creativeCommonsService.retrieveLicenseUri(licenseId, fullParamMap); + + SubmissionCCLicenseUrl submissionCCLicenseUrl = new SubmissionCCLicenseUrl(licenseUri, licenseUri); + if (StringUtils.isBlank(licenseUri)) { + throw new ResourceNotFoundException("No CC License URI could be found for ID: " + licenseId); + } + + return converter.toRest(submissionCCLicenseUrl, utils.obtainProjection()); + + } + + /** + * The findOne method is not supported in this repository + */ + @PreAuthorize("permitAll()") + public SubmissionCCLicenseUrlRest findOne(final Context context, final String s) { + throw new RepositoryMethodNotImplementedException(SubmissionCCLicenseUrlRest.NAME, "findOne"); + } + + /** + * The findAll method is not supported in this repository + */ + public Page findAll(final Context context, final Pageable pageable) { + throw new RepositoryMethodNotImplementedException(SubmissionCCLicenseUrlRest.NAME, "findAll"); + } + + public Class getDomainClass() { + return SubmissionCCLicenseUrlRest.class; + } + + @Override + public void afterPropertiesSet() { + discoverableEndpointsService.register(this, Arrays.asList( + new Link("/api/" + SubmissionCCLicenseUrlRest.CATEGORY + "/" + + SubmissionCCLicenseUrlRest.NAME + "/search", + 
SubmissionCCLicenseUrlRest.NAME + "-search"))); + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/UUIDLookupRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/UUIDLookupRestController.java index b20848b3e4..17ce7d4c54 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/UUIDLookupRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/UUIDLookupRestController.java @@ -2,12 +2,12 @@ * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at - * + * * http://www.dspace.org/license/ */ package org.dspace.app.rest; -import static org.springframework.hateoas.mvc.ControllerLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; import java.io.IOException; import java.net.URI; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/WorkflowDefinitionCollectionsLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/WorkflowDefinitionCollectionsLinkRepository.java index 9496e32738..fd1192e0bb 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/WorkflowDefinitionCollectionsLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/WorkflowDefinitionCollectionsLinkRepository.java @@ -12,13 +12,11 @@ import java.util.List; import javax.annotation.Nullable; import javax.servlet.http.HttpServletRequest; -import org.dspace.app.rest.converter.ConverterService; import org.dspace.app.rest.model.CollectionRest; import org.dspace.app.rest.model.WorkflowDefinitionRest; import org.dspace.app.rest.projection.Projection; import org.dspace.app.rest.repository.AbstractDSpaceRestRepository; import org.dspace.app.rest.repository.LinkRestRepository; -import org.dspace.app.rest.utils.Utils; import org.dspace.content.Collection; import org.dspace.core.Context; import org.dspace.xmlworkflow.factory.XmlWorkflowFactory; @@ -43,12 +41,6 @@ public class WorkflowDefinitionCollectionsLinkRepository extends AbstractDSpaceR @Autowired protected XmlWorkflowFactory xmlWorkflowFactory; - @Autowired - protected ConverterService converter; - - @Autowired - protected Utils utils; - /** * GET endpoint that returns the list of collections that make an explicit use of the workflow-definition. * If a collection doesn't specify the workflow-definition to be used, the default mapping applies, @@ -69,10 +61,10 @@ public class WorkflowDefinitionCollectionsLinkRepository extends AbstractDSpaceR if (xmlWorkflowFactory.isDefaultWorkflow(workflowName)) { collectionsMappedToWorkflow.addAll(xmlWorkflowFactory.getAllNonMappedCollectionsHandles(context)); } - collectionsMappedToWorkflow.addAll(xmlWorkflowFactory.getCollectionHandlesMappedToWorklow(context, + collectionsMappedToWorkflow.addAll(xmlWorkflowFactory.getCollectionHandlesMappedToWorkflow(context, workflowName)); - Pageable pageable = optionalPageable != null ? optionalPageable : new PageRequest(0, 20); - return converter.toRestPage(utils.getPage(collectionsMappedToWorkflow, pageable), + Pageable pageable = optionalPageable != null ? 
optionalPageable : PageRequest.of(0, 20); + return super.converter.toRestPage(collectionsMappedToWorkflow, pageable, projection); } else { throw new ResourceNotFoundException("No workflow with name " + workflowName + " is configured"); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/WorkflowDefinitionStepsLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/WorkflowDefinitionStepsLinkRepository.java index fe05a4c1d0..24c82ee460 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/WorkflowDefinitionStepsLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/WorkflowDefinitionStepsLinkRepository.java @@ -54,8 +54,8 @@ public class WorkflowDefinitionStepsLinkRepository extends AbstractDSpaceRestRep Projection projection) { try { List steps = xmlWorkflowFactory.getWorkflowByName(workflowName).getSteps(); - Pageable pageable = optionalPageable != null ? optionalPageable : new PageRequest(0, 20); - return converter.toRestPage(utils.getPage(steps, pageable), projection); + Pageable pageable = optionalPageable != null ? optionalPageable : PageRequest.of(0, 20); + return converter.toRestPage(steps, pageable, projection); } catch (WorkflowConfigurationException e) { throw new ResourceNotFoundException("No workflow with name " + workflowName + " is configured"); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/WorkflowStepActionsLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/WorkflowStepActionsLinkRepository.java index b11dd929d5..f2b6a423f8 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/WorkflowStepActionsLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/WorkflowStepActionsLinkRepository.java @@ -51,7 +51,7 @@ public class WorkflowStepActionsLinkRepository extends AbstractDSpaceRestReposit @Nullable Pageable optionalPageable, Projection projection) { List actions = xmlWorkflowFactory.getStepByName(workflowStepName).getActions(); - Pageable pageable = optionalPageable != null ? optionalPageable : new PageRequest(0, 20); - return converter.toRestPage(utils.getPage(actions, pageable), projection); + Pageable pageable = optionalPageable != null ? 
optionalPageable : PageRequest.of(0, 20); + return converter.toRestPage(actions, pageable, projection); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/AuthorizeServiceRestUtil.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/AuthorizeServiceRestUtil.java new file mode 100644 index 0000000000..6b34479ec0 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/AuthorizeServiceRestUtil.java @@ -0,0 +1,72 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.authorization; + +import java.sql.SQLException; + +import org.dspace.app.rest.model.BaseObjectRest; +import org.dspace.app.rest.security.DSpaceRestPermission; +import org.dspace.app.rest.utils.Utils; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.DSpaceObjectService; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * This class is a wrapper around the AuthorizeService which takes REST objects instead of DSpace objects + */ +@Component +public class AuthorizeServiceRestUtil { + @Autowired + private AuthorizeService authorizeService; + @Autowired + private Utils utils; + @Autowired + private ContentServiceFactory contentServiceFactory; + + /** + * Checks whether the specified eperson can perform the given action on the given REST object. + * + * @param context DSpace context + * @param object The REST object to test the action against + * @param dSpaceRestPermission The permission to check + * @return A boolean indicating if the action is allowed by the logged in ePerson on the given object + * @throws SQLException + */ + public boolean authorizeActionBoolean(Context context, BaseObjectRest object, + DSpaceRestPermission dSpaceRestPermission) + throws SQLException { + + DSpaceObject dSpaceObject = (DSpaceObject)utils.getDSpaceAPIObjectFromRest(context, object); + if (dSpaceObject == null) { + return false; + } + + DSpaceObjectService dSpaceObjectService = + contentServiceFactory.getDSpaceObjectService(dSpaceObject.getType()); + + EPerson ePerson = context.getCurrentUser(); + + // If the item is still in progress, we can only process the READ permission here. 
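+        // (For example, a WRITE or DELETE check against the Item of an in-progress workspace or workflow item returns false here.)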
+ // Other actions need to be evaluated against the wrapper object (workspace or workflow item) + if (dSpaceObject instanceof Item) { + if (!DSpaceRestPermission.READ.equals(dSpaceRestPermission) + && !((Item) dSpaceObject).isArchived() && !((Item) dSpaceObject).isWithdrawn()) { + return false; + } + } + + return authorizeService.authorizeActionBoolean(context, ePerson, dSpaceObject, + dSpaceRestPermission.getDspaceApiActionId(), true); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/AdministratorOfFeature.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/AdministratorOfFeature.java new file mode 100644 index 0000000000..6cfee12751 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/AdministratorOfFeature.java @@ -0,0 +1,75 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.authorization.impl; + +import java.sql.SQLException; + +import org.dspace.app.rest.authorization.AuthorizationFeature; +import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation; +import org.dspace.app.rest.model.BaseObjectRest; +import org.dspace.app.rest.model.CollectionRest; +import org.dspace.app.rest.model.CommunityRest; +import org.dspace.app.rest.model.ItemRest; +import org.dspace.app.rest.model.SiteRest; +import org.dspace.app.rest.utils.Utils; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * The administrator feature. It can be used to verify that a user has access + * to the administrative features of the repository or of a specific community or collection. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it) + */ +@Component +@AuthorizationFeatureDocumentation(name = AdministratorOfFeature.NAME, + description = "It can be used to verify that a user has access " + + "to the administrative features of the repository or of a specific community or collection") +public class AdministratorOfFeature implements AuthorizationFeature { + + public static final String NAME = "administratorOf"; + + @Autowired + AuthorizeService authService; + @Autowired + private Utils utils; + + @Override + public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLException { + if (object != null) { + if (object instanceof CommunityRest) { + Community community = (Community) utils.getDSpaceAPIObjectFromRest(context, object); + return authService.isAdmin(context, community); + } + if (object instanceof CollectionRest) { + Collection collection = (Collection) utils.getDSpaceAPIObjectFromRest(context, object); + return authService.isAdmin(context, collection); + } + if (object instanceof ItemRest) { + Item item = (Item) utils.getDSpaceAPIObjectFromRest(context, object); + return authService.isAdmin(context, item); + } + } + return authService.isAdmin(context); + } + + @Override + public String[] getSupportedTypes() { + return new String[]{ + SiteRest.CATEGORY + "." + SiteRest.NAME, + CommunityRest.CATEGORY + "." + CommunityRest.NAME, + CollectionRest.CATEGORY + "." 
+ CollectionRest.NAME, + ItemRest.CATEGORY + "." + ItemRest.NAME + }; + } +} \ No newline at end of file diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/CreateBitstreamFeature.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/CreateBitstreamFeature.java new file mode 100644 index 0000000000..7c26383ca9 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/CreateBitstreamFeature.java @@ -0,0 +1,89 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.authorization.impl; + +import java.sql.SQLException; + +import org.apache.log4j.Logger; +import org.dspace.app.rest.authorization.AuthorizationFeature; +import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation; +import org.dspace.app.rest.authorization.AuthorizeServiceRestUtil; +import org.dspace.app.rest.model.BaseObjectRest; +import org.dspace.app.rest.model.BundleRest; +import org.dspace.app.rest.security.DSpaceRestPermission; +import org.dspace.app.rest.utils.Utils; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Bundle; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.service.BundleService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * The create bitstream feature. It can be used to verify if bitstreams can be created in a specific bundle. + * + * Authorization is granted if the current user has ADD & WRITE permissions on the given bundle AND the item + */ +@Component +@AuthorizationFeatureDocumentation(name = CreateBitstreamFeature.NAME, + description = "It can be used to verify if bitstreams can be created in a specific bundle") +public class CreateBitstreamFeature implements AuthorizationFeature { + + Logger log = Logger.getLogger(CreateBitstreamFeature.class); + + public final static String NAME = "canCreateBitstream"; + + @Autowired + private AuthorizeServiceRestUtil authorizeServiceRestUtil; + @Autowired + private BundleService bundleService; + @Autowired + private Utils utils; + @Autowired + private AuthorizeService authorizeService; + + @Override + public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLException { + if (object instanceof BundleRest) { + if (!authorizeServiceRestUtil.authorizeActionBoolean(context, object, DSpaceRestPermission.WRITE)) { + return false; + } + if (!authorizeServiceRestUtil.authorizeActionBoolean(context, object, DSpaceRestPermission.ADD)) { + return false; + } + + DSpaceObject owningObject = bundleService.getParentObject(context, + (Bundle)utils.getDSpaceAPIObjectFromRest(context, object)); + + // Safety check. In case this is ever not true, this method should be revised. 
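+            // (In the current DSpace data model a bundle's parent object is always an Item, so this branch is not expected to be reached.)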
+ if (!(owningObject instanceof Item)) { + log.error("The parent object of bundle " + object.getType() + " is not an item"); + return false; + } + + if (!authorizeService.authorizeActionBoolean(context, context.getCurrentUser(), owningObject, + Constants.WRITE, true)) { + return false; + } + + return authorizeService.authorizeActionBoolean(context, context.getCurrentUser(), owningObject, + Constants.ADD, true); + } + return false; + } + + @Override + public String[] getSupportedTypes() { + return new String[]{ + BundleRest.CATEGORY + "." + BundleRest.NAME + }; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/CreateBundleFeature.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/CreateBundleFeature.java new file mode 100644 index 0000000000..0dc97446aa --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/CreateBundleFeature.java @@ -0,0 +1,54 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.authorization.impl; + +import java.sql.SQLException; + +import org.dspace.app.rest.authorization.AuthorizationFeature; +import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation; +import org.dspace.app.rest.authorization.AuthorizeServiceRestUtil; +import org.dspace.app.rest.model.BaseObjectRest; +import org.dspace.app.rest.model.ItemRest; +import org.dspace.app.rest.security.DSpaceRestPermission; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * The create bundle feature. It can be used to verify if bundles can be created in a specific item. + * + * Authorization is granted if the current user has ADD & WRITE permissions on the given item + */ +@Component +@AuthorizationFeatureDocumentation(name = CreateBundleFeature.NAME, + description = "It can be used to verify if bundles can be created in a specific item") +public class CreateBundleFeature implements AuthorizationFeature { + + public final static String NAME = "canCreateBundle"; + + @Autowired + private AuthorizeServiceRestUtil authorizeServiceRestUtil; + + @Override + public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLException { + if (object instanceof ItemRest) { + if (!authorizeServiceRestUtil.authorizeActionBoolean(context, object, DSpaceRestPermission.WRITE)) { + return false; + } + return authorizeServiceRestUtil.authorizeActionBoolean(context, object, DSpaceRestPermission.ADD); + } + return false; + } + + @Override + public String[] getSupportedTypes() { + return new String[]{ + ItemRest.CATEGORY + "." 
+ ItemRest.NAME + }; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/CreateCollectionFeature.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/CreateCollectionFeature.java new file mode 100644 index 0000000000..57b92aa425 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/CreateCollectionFeature.java @@ -0,0 +1,59 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.authorization.impl; + +import java.sql.SQLException; + +import org.dspace.app.rest.authorization.AuthorizationFeature; +import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation; +import org.dspace.app.rest.model.BaseObjectRest; +import org.dspace.app.rest.model.CommunityRest; +import org.dspace.app.rest.utils.Utils; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Community; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * The canCreateCollections feature. + * It can be used to verify if a user has access to create a new collection within a specific community. + */ +@Component +@AuthorizationFeatureDocumentation(name = CreateCollectionFeature.NAME, + description = "It can be used to verify if a user has access to create a new collection within a specific " + + "community") +public class CreateCollectionFeature implements AuthorizationFeature { + + public static final String NAME = "canCreateCollections"; + + @Autowired + AuthorizeService authService; + + @Autowired + Utils utils; + + @Override + public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLException { + if (object != null) { + if (object instanceof CommunityRest) { + Community community = (Community) utils.getDSpaceAPIObjectFromRest(context, object); + return authService.authorizeActionBoolean(context, community, Constants.ADD); + } + } + return false; + } + + @Override + public String[] getSupportedTypes() { + return new String[]{ + CommunityRest.CATEGORY + "." 
+ CommunityRest.NAME + }; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/CreateCommunityFeature.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/CreateCommunityFeature.java new file mode 100644 index 0000000000..aefc2b0a42 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/CreateCommunityFeature.java @@ -0,0 +1,66 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.authorization.impl; + +import java.sql.SQLException; + +import org.dspace.app.rest.authorization.AuthorizationFeature; +import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation; +import org.dspace.app.rest.model.BaseObjectRest; +import org.dspace.app.rest.model.CommunityRest; +import org.dspace.app.rest.model.SiteRest; +import org.dspace.app.rest.utils.Utils; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Community; +import org.dspace.content.Site; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * The canCreateCommunities feature. + * It can be used to verify if a user has access to create a new community within a specific parent community or site. + */ +@Component +@AuthorizationFeatureDocumentation(name = CreateCommunityFeature.NAME, + description = "It can be used to verify if a user has access to create a new community within a specific parent" + + " community or site") +public class CreateCommunityFeature implements AuthorizationFeature { + + public static final String NAME = "canCreateCommunities"; + + @Autowired + AuthorizeService authService; + + @Autowired + Utils utils; + + @Override + public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLException { + if (object != null) { + if (object instanceof CommunityRest) { + Community community = (Community) utils.getDSpaceAPIObjectFromRest(context, object); + return authService.authorizeActionBoolean(context, community, Constants.ADD); + } + if (object instanceof SiteRest) { + Site site = (Site) utils.getDSpaceAPIObjectFromRest(context, object); + return authService.authorizeActionBoolean(context, site, Constants.ADD); + } + } + return false; + } + + @Override + public String[] getSupportedTypes() { + return new String[]{ + SiteRest.CATEGORY + "." + SiteRest.NAME, + CommunityRest.CATEGORY + "." 
+ CommunityRest.NAME + }; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/DeleteFeature.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/DeleteFeature.java new file mode 100644 index 0000000000..02ca816290 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/DeleteFeature.java @@ -0,0 +1,138 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.authorization.impl; + +import java.sql.SQLException; + +import org.dspace.app.rest.authorization.AuthorizationFeature; +import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation; +import org.dspace.app.rest.authorization.AuthorizeServiceRestUtil; +import org.dspace.app.rest.model.BaseObjectRest; +import org.dspace.app.rest.model.BitstreamRest; +import org.dspace.app.rest.model.BundleRest; +import org.dspace.app.rest.model.CollectionRest; +import org.dspace.app.rest.model.CommunityRest; +import org.dspace.app.rest.model.EPersonRest; +import org.dspace.app.rest.model.GroupRest; +import org.dspace.app.rest.model.ItemRest; +import org.dspace.app.rest.model.WorkspaceItemRest; +import org.dspace.app.rest.security.DSpaceRestPermission; +import org.dspace.app.rest.utils.Utils; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.WorkspaceItem; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * The delete feature. It can be used to verify if specific content can be deleted/expunged. + * + * Authorization is granted + * - for a bitstream if the current user has REMOVE permissions on both the Item and the Bundle + * - for a bundle if the current user has REMOVE permissions on the Item + * - for an item if the current user has REMOVE permissions on the collection AND DELETE permissions on the item + * - for a collection if the current user has REMOVE permissions on the community + * - for a community with a parent community if the current user has REMOVE permissions on the parent community + * - for a community without a parent community if the current user has DELETE permissions on the current community + * - for other objects if the current user has REMOVE permissions on the parent object if there is one. 
Otherwise if the + * current user has DELETE permissions on the current object + */ +@Component +@AuthorizationFeatureDocumentation(name = DeleteFeature.NAME, + description = "It can be used to verify if specific content can be deleted/expunged") +public class DeleteFeature implements AuthorizationFeature { + + public final static String NAME = "canDelete"; + + @Autowired + private AuthorizeServiceRestUtil authorizeServiceRestUtil; + @Autowired + private Utils utils; + @Autowired + private AuthorizeService authorizeService; + @Autowired + private ContentServiceFactory contentServiceFactory; + + @Override + public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLException { + if (object instanceof BaseObjectRest) { + if (object.getType().equals(WorkspaceItemRest.NAME)) { + object = ((WorkspaceItemRest)object).getItem(); + } + + DSpaceObject dSpaceObject = (DSpaceObject) utils.getDSpaceAPIObjectFromRest(context, object); + DSpaceObject parentObject = getParentObject(context, dSpaceObject); + + switch (object.getType()) { + case BitstreamRest.NAME: + return ( + authorizeService.authorizeActionBoolean(context, context.getCurrentUser(), parentObject, + Constants.REMOVE, true) + && authorizeService.authorizeActionBoolean(context, context.getCurrentUser(), dSpaceObject, + Constants.REMOVE, true) + ); + case ItemRest.NAME: + return ( + authorizeService.authorizeActionBoolean(context, context.getCurrentUser(), parentObject, + Constants.REMOVE, true) + && authorizeServiceRestUtil.authorizeActionBoolean(context, object, + DSpaceRestPermission.DELETE) + ); + case CollectionRest.NAME: + case CommunityRest.NAME: + case BundleRest.NAME: + case WorkspaceItemRest.NAME: + case EPersonRest.NAME: + case GroupRest.NAME: + default: + if (parentObject != null) { + return authorizeService.authorizeActionBoolean(context, context.getCurrentUser(), parentObject, + Constants.REMOVE, true); + } + + return authorizeServiceRestUtil.authorizeActionBoolean(context, object, + DSpaceRestPermission.DELETE); + } + } + return false; + } + + private DSpaceObject getParentObject(Context context, DSpaceObject object) throws SQLException { + DSpaceObject parentObject + = contentServiceFactory.getDSpaceObjectService(object.getType()).getParentObject(context, object); + if (object.getType() == Constants.ITEM && parentObject == null) { + Item item = (Item) object; + parentObject = item.getOwningCollection(); + WorkspaceItem byItem = ContentServiceFactory.getInstance() + .getWorkspaceItemService() + .findByItem(context, item); + if (byItem != null) { + parentObject = byItem.getCollection(); + } + } + return parentObject; + } + + @Override + public String[] getSupportedTypes() { + return new String[]{ + CommunityRest.CATEGORY + "." + CommunityRest.NAME, + CollectionRest.CATEGORY + "." + CollectionRest.NAME, + ItemRest.CATEGORY + "." + ItemRest.NAME, + BundleRest.CATEGORY + "." + BundleRest.NAME, + BitstreamRest.CATEGORY + "." + BitstreamRest.NAME, + WorkspaceItemRest.CATEGORY + "." + WorkspaceItemRest.NAME, + EPersonRest.CATEGORY + "." + EPersonRest.NAME, + GroupRest.CATEGORY + "." 
+ GroupRest.NAME + }; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/EPersonRegistrationFeature.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/EPersonRegistrationFeature.java new file mode 100644 index 0000000000..a03d68fcc9 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/EPersonRegistrationFeature.java @@ -0,0 +1,52 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.authorization.impl; + +import java.sql.SQLException; + +import org.dspace.app.rest.authorization.AuthorizationFeature; +import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation; +import org.dspace.app.rest.model.BaseObjectRest; +import org.dspace.app.rest.model.SiteRest; +import org.dspace.app.util.AuthorizeUtil; +import org.dspace.core.Context; +import org.dspace.services.RequestService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * The EPerson Registration feature. It can be used on Site objects if the user.registration property is set to + * true. If so, it checks whether the current context is allowed to register a new account. + */ +@Component +@AuthorizationFeatureDocumentation(name = EPersonRegistrationFeature.NAME, + description = "It can be used to register an eperson") +public class EPersonRegistrationFeature implements AuthorizationFeature { + + public static final String NAME = "epersonRegistration"; + + @Autowired + private RequestService requestService; + + @Override + public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLException { + if (!(object instanceof SiteRest)) { + return false; + } + if (!AuthorizeUtil.authorizeNewAccountRegistration(context, + requestService.getCurrentRequest().getHttpServletRequest())) { + return false; + } + return true; + } + + @Override + public String[] getSupportedTypes() { + return new String[] {SiteRest.CATEGORY + "." 
+ SiteRest.NAME}; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/EditMetadataFeature.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/EditMetadataFeature.java new file mode 100644 index 0000000000..06961bc33d --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/EditMetadataFeature.java @@ -0,0 +1,67 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.authorization.impl; + +import java.sql.SQLException; + +import org.dspace.app.rest.authorization.AuthorizationFeature; +import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation; +import org.dspace.app.rest.authorization.AuthorizeServiceRestUtil; +import org.dspace.app.rest.model.BaseObjectRest; +import org.dspace.app.rest.model.BitstreamRest; +import org.dspace.app.rest.model.BundleRest; +import org.dspace.app.rest.model.CollectionRest; +import org.dspace.app.rest.model.CommunityRest; +import org.dspace.app.rest.model.ItemRest; +import org.dspace.app.rest.model.SiteRest; +import org.dspace.app.rest.security.DSpaceRestPermission; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * The edit metadata feature. It can be used to verify if the metadata of the specified objects can be edited. + * + * Authorization is granted if the current user has WRITE permissions on the given DSO + */ +@Component +@AuthorizationFeatureDocumentation(name = EditMetadataFeature.NAME, + description = "It can be used to verify if the metadata of the specified objects can be edited") +public class EditMetadataFeature implements AuthorizationFeature { + + public final static String NAME = "canEditMetadata"; + + @Autowired + private AuthorizeServiceRestUtil authorizeServiceRestUtil; + + @Override + public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLException { + if (object instanceof CommunityRest + || object instanceof CollectionRest + || object instanceof ItemRest + || object instanceof BundleRest + || object instanceof BitstreamRest + || object instanceof SiteRest + ) { + return authorizeServiceRestUtil.authorizeActionBoolean(context, object, DSpaceRestPermission.WRITE); + } + return false; + } + + @Override + public String[] getSupportedTypes() { + return new String[]{ + CommunityRest.CATEGORY + "." + CommunityRest.NAME, + CollectionRest.CATEGORY + "." + CollectionRest.NAME, + ItemRest.CATEGORY + "." + ItemRest.NAME, + BundleRest.CATEGORY + "." + BundleRest.NAME, + BitstreamRest.CATEGORY + "." + BitstreamRest.NAME, + SiteRest.CATEGORY + "." 
+ SiteRest.NAME + }; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/LoginOnBehalfOfFeature.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/LoginOnBehalfOfFeature.java new file mode 100644 index 0000000000..f5578f19c6 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/LoginOnBehalfOfFeature.java @@ -0,0 +1,84 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.authorization.impl; + +import java.sql.SQLException; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.app.rest.authorization.AuthorizationFeature; +import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation; +import org.dspace.app.rest.model.BaseObjectRest; +import org.dspace.app.rest.model.EPersonRest; +import org.dspace.app.rest.model.SiteRest; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.service.EPersonService; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * The LoginOnBehalfOf feature. It can be used by Administrators when the webui.user.assumelogin property is set to true + * to allow the admin to login as another user. + * Calling this with the Site URI will check if the current logged in user can use this feature + * without specifying a specific eperson. This will be the case if the logged in user is an Admin + * and if the webui.user.assumelogin is set to true + * Calling this with the EPerson URI will check if the current logged in user can perform the loginOnBehalfOf + * as the given user. 
This will then additionally check that the eperson given isn't the same as the logged in + * user and it'll also check that the eperson given isn't an admin + */ +@Component +@AuthorizationFeatureDocumentation(name = LoginOnBehalfOfFeature.NAME, + description = "It can be used by administrators to login on behalf of a different user") +public class LoginOnBehalfOfFeature implements AuthorizationFeature { + + public static final String NAME = "loginOnBehalfOf"; + + @Autowired + private AuthorizeService authorizeService; + + @Autowired + private ConfigurationService configurationService; + + @Autowired + private EPersonService ePersonService; + + @Override + public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLException { + if (!StringUtils.equals(object.getType(), SiteRest.NAME) && + !StringUtils.equals(object.getType(), EPersonRest.NAME)) { + return false; + } + if (!authorizeService.isAdmin(context)) { + return false; + } + if (!configurationService.getBooleanProperty("webui.user.assumelogin")) { + return false; + } + if (StringUtils.equals(object.getType(), EPersonRest.NAME)) { + EPersonRest ePersonRest = (EPersonRest) object; + EPerson currentUser = context.getCurrentUser(); + if (StringUtils.equalsIgnoreCase(currentUser.getEmail(), ePersonRest.getEmail())) { + return false; + } + + EPerson ePerson = ePersonService.findByEmail(context, ePersonRest.getEmail()); + if (authorizeService.isAdmin(context, ePerson)) { + return false; + } + } + return true; + } + + @Override + public String[] getSupportedTypes() { + return new String[] {SiteRest.CATEGORY + "." + SiteRest.NAME, EPersonRest.CATEGORY + "." + EPersonRest.NAME}; + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/MakeDiscoverableFeature.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/MakeDiscoverableFeature.java new file mode 100644 index 0000000000..76fd190ec9 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/MakeDiscoverableFeature.java @@ -0,0 +1,51 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.authorization.impl; + +import java.sql.SQLException; + +import org.dspace.app.rest.authorization.AuthorizationFeature; +import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation; +import org.dspace.app.rest.authorization.AuthorizeServiceRestUtil; +import org.dspace.app.rest.model.BaseObjectRest; +import org.dspace.app.rest.model.ItemRest; +import org.dspace.app.rest.security.DSpaceRestPermission; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * The make discoverable feature. It can be used to verify if an item can be made discoverable. 
+ * + * Authorization is granted if the current user has WRITE permissions on the given item + */ +@Component +@AuthorizationFeatureDocumentation(name = MakeDiscoverableFeature.NAME, + description = "It can be used to verify if an item can be made discoverable") +public class MakeDiscoverableFeature implements AuthorizationFeature { + + public final static String NAME = "canMakeDiscoverable"; + + @Autowired + private AuthorizeServiceRestUtil authorizeServiceRestUtil; + + @Override + public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLException { + if (object instanceof ItemRest) { + return authorizeServiceRestUtil.authorizeActionBoolean(context, object, DSpaceRestPermission.WRITE); + } + return false; + } + + @Override + public String[] getSupportedTypes() { + return new String[]{ + ItemRest.CATEGORY + "." + ItemRest.NAME + }; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/MakePrivateFeature.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/MakePrivateFeature.java new file mode 100644 index 0000000000..d48f52fb3d --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/MakePrivateFeature.java @@ -0,0 +1,51 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.authorization.impl; + +import java.sql.SQLException; + +import org.dspace.app.rest.authorization.AuthorizationFeature; +import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation; +import org.dspace.app.rest.authorization.AuthorizeServiceRestUtil; +import org.dspace.app.rest.model.BaseObjectRest; +import org.dspace.app.rest.model.ItemRest; +import org.dspace.app.rest.security.DSpaceRestPermission; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * The make private feature. It can be used to verify if an item can be made private. + * + * Authorization is granted if the current user has WRITE permissions on the given item + */ +@Component +@AuthorizationFeatureDocumentation(name = MakePrivateFeature.NAME, + description = "It can be used to verify if an item can be made private") +public class MakePrivateFeature implements AuthorizationFeature { + + public final static String NAME = "canMakePrivate"; + + @Autowired + private AuthorizeServiceRestUtil authorizeServiceRestUtil; + + @Override + public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLException { + if (object instanceof ItemRest) { + return authorizeServiceRestUtil.authorizeActionBoolean(context, object, DSpaceRestPermission.WRITE); + } + return false; + } + + @Override + public String[] getSupportedTypes() { + return new String[]{ + ItemRest.CATEGORY + "." 
+ ItemRest.NAME + }; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/MoveFeature.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/MoveFeature.java new file mode 100644 index 0000000000..a364e0c0ae --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/MoveFeature.java @@ -0,0 +1,81 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.authorization.impl; + +import java.sql.SQLException; + +import org.apache.log4j.Logger; +import org.dspace.app.rest.authorization.AuthorizationFeature; +import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation; +import org.dspace.app.rest.authorization.AuthorizeServiceRestUtil; +import org.dspace.app.rest.model.BaseObjectRest; +import org.dspace.app.rest.model.ItemRest; +import org.dspace.app.rest.security.DSpaceRestPermission; +import org.dspace.app.rest.utils.Utils; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Collection; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * The move feature. It can be used to verify if an item can be moved to a different collection. + * + * Authorization is granted if the current user has WRITE permissions on the given item and REMOVE permissions on the + * item’s owning collection + */ +@Component +@AuthorizationFeatureDocumentation(name = MoveFeature.NAME, + description = "It can be used to verify if an item can be moved to a different collection") +public class MoveFeature implements AuthorizationFeature { + + Logger log = Logger.getLogger(MoveFeature.class); + + public final static String NAME = "canMove"; + + @Autowired + private AuthorizeServiceRestUtil authorizeServiceRestUtil; + @Autowired + private Utils utils; + @Autowired + private ItemService itemService; + @Autowired + private AuthorizeService authorizeService; + + @Override + public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLException { + if (object instanceof ItemRest) { + if (!authorizeServiceRestUtil.authorizeActionBoolean(context, object, DSpaceRestPermission.WRITE)) { + return false; + } + + DSpaceObject owningObject = itemService.getParentObject(context, + (Item)utils.getDSpaceAPIObjectFromRest(context, object)); + + if (!(owningObject instanceof Collection)) { + log.error("The parent object of item " + object.getType() + " is not a collection"); + return false; + } + + return authorizeService.authorizeActionBoolean(context, context.getCurrentUser(), owningObject, + Constants.REMOVE, true); + } + return false; + } + + @Override + public String[] getSupportedTypes() { + return new String[]{ + ItemRest.CATEGORY + "." 
+ ItemRest.NAME + }; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/PolicyFeature.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/PolicyFeature.java new file mode 100644 index 0000000000..741d265cae --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/PolicyFeature.java @@ -0,0 +1,104 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.authorization.impl; + +import java.sql.SQLException; + +import org.dspace.app.rest.authorization.AuthorizationFeature; +import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation; +import org.dspace.app.rest.model.BaseObjectRest; +import org.dspace.app.rest.model.BitstreamRest; +import org.dspace.app.rest.model.BundleRest; +import org.dspace.app.rest.model.CollectionRest; +import org.dspace.app.rest.model.CommunityRest; +import org.dspace.app.rest.model.ItemRest; +import org.dspace.app.rest.model.SiteRest; +import org.dspace.app.rest.utils.Utils; +import org.dspace.app.util.AuthorizeUtil; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * The policy feature. It can be used by administrators (or community/collection delegate) to manage resource policies + * + * Authorization is granted + * - for the site if the current user is administrator + * - for other objects if the current user has ADMIN permissions on the object + */ +@Component +@AuthorizationFeatureDocumentation(name = PolicyFeature.NAME, + description = "It can be used to verify if the resourcepolicies of the specified objects can be managed") +public class PolicyFeature implements AuthorizationFeature { + + public static final String NAME = "canManagePolicies"; + + @Autowired + AuthorizeService authService; + @Autowired + private Utils utils; + + @Override + public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLException { + if (object != null) { + try { + if (object instanceof SiteRest) { + return authService.isAdmin(context); + } + if (object instanceof CommunityRest) { + AuthorizeUtil.authorizeManageCommunityPolicy(context, + (Community)utils.getDSpaceAPIObjectFromRest(context, object)); + return true; + } + if (object instanceof CollectionRest) { + AuthorizeUtil.authorizeManageCollectionPolicy(context, + (Collection) utils.getDSpaceAPIObjectFromRest(context, object)); + return true; + } + if (object instanceof ItemRest) { + AuthorizeUtil.authorizeManageItemPolicy(context, + (Item)utils.getDSpaceAPIObjectFromRest(context, object)); + return true; + } + if (object instanceof BundleRest) { + AuthorizeUtil.authorizeManageBundlePolicy(context, + (Bundle)utils.getDSpaceAPIObjectFromRest(context, object)); + return true; + } + if (object instanceof BitstreamRest) { + AuthorizeUtil.authorizeManageBitstreamPolicy(context, + (Bitstream)utils.getDSpaceAPIObjectFromRest(context, object)); + return true; + } + } catch (AuthorizeException e) { + return false; + } 
+ } + + return false; + } + + @Override + public String[] getSupportedTypes() { + return new String[]{ + SiteRest.CATEGORY + "." + SiteRest.NAME, + CommunityRest.CATEGORY + "." + CommunityRest.NAME, + CollectionRest.CATEGORY + "." + CollectionRest.NAME, + ItemRest.CATEGORY + "." + ItemRest.NAME, + BundleRest.CATEGORY + "." + BundleRest.NAME, + BitstreamRest.CATEGORY + "." + BitstreamRest.NAME + }; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/ReorderBitstreamFeature.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/ReorderBitstreamFeature.java new file mode 100644 index 0000000000..568d8e1319 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/ReorderBitstreamFeature.java @@ -0,0 +1,51 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.authorization.impl; + +import java.sql.SQLException; + +import org.dspace.app.rest.authorization.AuthorizationFeature; +import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation; +import org.dspace.app.rest.authorization.AuthorizeServiceRestUtil; +import org.dspace.app.rest.model.BaseObjectRest; +import org.dspace.app.rest.model.BundleRest; +import org.dspace.app.rest.security.DSpaceRestPermission; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * The reorder bitstream feature. It can be used to verify if bitstreams can be reordered in a specific bundle. + * + * Authorization is granted if the current user has WRITE permissions on the given bundle + */ +@Component +@AuthorizationFeatureDocumentation(name = ReorderBitstreamFeature.NAME, + description = "It can be used to verify if bitstreams can be reordered in a specific bundle") +public class ReorderBitstreamFeature implements AuthorizationFeature { + + public final static String NAME = "canReorderBitstreams"; + + @Autowired + private AuthorizeServiceRestUtil authorizeServiceRestUtil; + + @Override + public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLException { + if (object instanceof BundleRest) { + return authorizeServiceRestUtil.authorizeActionBoolean(context, object, DSpaceRestPermission.WRITE); + } + return false; + } + + @Override + public String[] getSupportedTypes() { + return new String[]{ + BundleRest.CATEGORY + "." 
+ BundleRest.NAME + }; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AuthenticationTokenConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AuthenticationTokenConverter.java new file mode 100644 index 0000000000..ea64bc8bc8 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AuthenticationTokenConverter.java @@ -0,0 +1,31 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.converter; + +import org.dspace.app.rest.model.AuthenticationTokenRest; +import org.dspace.app.rest.model.wrapper.AuthenticationToken; +import org.dspace.app.rest.projection.Projection; +import org.springframework.stereotype.Component; + +/** + * This is the converter from the AuthenticationToken to the REST data model + */ +@Component +public class AuthenticationTokenConverter implements DSpaceConverter { + @Override + public AuthenticationTokenRest convert(AuthenticationToken modelObject, Projection projection) { + AuthenticationTokenRest token = new AuthenticationTokenRest(); + token.setToken(modelObject.getToken()); + return token; + } + + @Override + public Class getModelClass() { + return AuthenticationToken.class; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ConverterService.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ConverterService.java index 295634599b..84ce1a0032 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ConverterService.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ConverterService.java @@ -7,39 +7,53 @@ */ package org.dspace.app.rest.converter; +import java.lang.annotation.Annotation; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; import java.util.HashMap; +import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import javax.annotation.Nullable; import javax.annotation.PostConstruct; +import org.apache.commons.lang3.StringUtils; import org.apache.log4j.Logger; import org.dspace.app.rest.link.HalLinkFactory; import org.dspace.app.rest.link.HalLinkService; +import org.dspace.app.rest.model.BaseObjectRest; import org.dspace.app.rest.model.RestAddressableModel; import org.dspace.app.rest.model.RestModel; import org.dspace.app.rest.model.hateoas.HALResource; import org.dspace.app.rest.projection.DefaultProjection; import org.dspace.app.rest.projection.Projection; +import org.dspace.app.rest.repository.DSpaceRestRepository; +import org.dspace.app.rest.security.DSpacePermissionEvaluator; +import org.dspace.app.rest.security.WebSecurityExpressionEvaluator; import org.dspace.app.rest.utils.Utils; +import org.dspace.services.RequestService; +import org.springframework.aop.support.AopUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider; +import org.springframework.core.annotation.AnnotationUtils; import org.springframework.core.type.filter.AssignableTypeFilter; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; 
+import org.springframework.hateoas.EntityModel; import org.springframework.hateoas.Link; -import org.springframework.hateoas.Resource; +import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Component; import org.springframework.stereotype.Service; /** * Converts domain objects from the DSpace service layer to rest objects, and from rest objects to resource * objects, applying {@link Projection}s where applicable. + * + * @author Luca Giamminonni (luca.giamminonni at 4science dot it) */ @Service public class ConverterService { @@ -64,6 +78,15 @@ public class ConverterService { @Autowired private List projections; + @Autowired + private DSpacePermissionEvaluator dSpacePermissionEvaluator; + + @Autowired + private WebSecurityExpressionEvaluator webSecurityExpressionEvaluator; + + @Autowired + private RequestService requestService; + /** * Converts the given model object to a rest object, using the appropriate {@link DSpaceConverter} and * the given projection. @@ -86,18 +109,90 @@ public class ConverterService { M transformedModel = projection.transformModel(modelObject); DSpaceConverter converter = requireConverter(modelObject.getClass()); R restObject = converter.convert(transformedModel, projection); + if (restObject instanceof BaseObjectRest) { + BaseObjectRest baseObjectRest = (BaseObjectRest) restObject; + // This section will verify whether the current user has permissions to retrieve the + // rest object. It'll only return the REST object if the permission is granted. + // If permission isn't granted, it'll return null + String preAuthorizeValue = getPreAuthorizeAnnotationForBaseObject(baseObjectRest); + if (!webSecurityExpressionEvaluator + .evaluate(preAuthorizeValue, requestService.getCurrentRequest().getHttpServletRequest(), + requestService.getCurrentRequest().getHttpServletResponse(), + String.valueOf(baseObjectRest.getId()))) { + log.debug("Access denied on " + restObject.getClass() + " with id: " + + ((BaseObjectRest) restObject).getId()); + return null; + } + } if (restObject instanceof RestModel) { return (R) projection.transformRest((RestModel) restObject); } return restObject; } + private String getPreAuthorizeAnnotationForBaseObject(BaseObjectRest restObject) { + Annotation preAuthorize = getAnnotationForRestObject(restObject); + if (preAuthorize == null) { + preAuthorize = getDefaultFindOnePreAuthorize(); + + } + return parseAnnotation(preAuthorize); + + } + + private String parseAnnotation(Annotation preAuthorize) { + if (preAuthorize != null) { + return (String) AnnotationUtils.getValue(preAuthorize); + } + return null; + } + + private Annotation getAnnotationForRestObject(BaseObjectRest restObject) { + BaseObjectRest baseObjectRest = restObject; + DSpaceRestRepository repositoryToUse = utils + .getResourceRepositoryByCategoryAndModel(baseObjectRest.getCategory(), baseObjectRest.getType()); + Annotation preAuthorize = null; + int maxDepth = 0; + // DS-4530 exclude the AOP Proxy from determining the annotations + for (Method m : AopUtils.getTargetClass(repositoryToUse).getMethods()) { + if (StringUtils.equalsIgnoreCase(m.getName(), "findOne")) { + int depth = howManySuperclass(m.getDeclaringClass()); + if (depth > maxDepth) { + preAuthorize = AnnotationUtils.findAnnotation(m, PreAuthorize.class); + maxDepth = depth; + } + } + } + return preAuthorize; + } + + private int howManySuperclass(Class declaringClass) { + Class curr = declaringClass; + int count = 0; + while (curr != Object.class) { + curr = curr.getSuperclass(); 
+ count++; + } + return count; + } + + private Annotation getDefaultFindOnePreAuthorize() { + for (Method m : DSpaceRestRepository.class.getMethods()) { + if (StringUtils.equalsIgnoreCase(m.getName(), "findOne")) { + Annotation annotation = AnnotationUtils.findAnnotation(m, PreAuthorize.class); + if (annotation != null) { + return annotation; + } + } + } + return null; + } + /** * Converts a list of model objects to a page of rest objects using the given {@link Projection}. * * @param modelObjects the list of model objects. * @param pageable the pageable. - * @param total the total number of items. * @param projection the projection to use. * @param the model object class. * @param the rest object class. @@ -105,25 +200,48 @@ public class ConverterService { * @throws IllegalArgumentException if there is no compatible converter. * @throws ClassCastException if the converter's return type is not compatible with the inferred return type. */ - public Page toRestPage(List modelObjects, Pageable pageable, long total, Projection projection) { - return new PageImpl<>(modelObjects, pageable, total).map((object) -> toRest(object, projection)); + public Page toRestPage(List modelObjects, Pageable pageable, Projection projection) { + List transformedList = new LinkedList<>(); + for (M modelObject : modelObjects) { + R transformedObject = toRest(modelObject, projection); + if (transformedObject != null) { + transformedList.add(transformedObject); + } + } + if (pageable == null) { + pageable = utils.getPageable(pageable); + } + return utils.getPage(transformedList, pageable); } /** - * Converts a list of model objects to a page of rest objects using the given {@link Projection}. - * - * @param modelObjects the page of model objects. + * Converts a list of ModelObjects to a page of Rest Objects using the given {@link Projection} + * This method differences in the sense that we define a total here instead of the size of the list because + * this method will be called if the list is limited through a DB call already and thus we need to give the + * total amount of records in the DB; not the size of the given list + * @param modelObjects the list of model objects. + * @param pageable the pageable. + * @param total The total amount of objects * @param projection the projection to use. * @param the model object class. * @param the rest object class. * @return the page. - * @throws IllegalArgumentException if there is no compatible converter. - * @throws ClassCastException if the converter's return type is not compatible with the inferred return type. */ - public Page toRestPage(Page modelObjects, Projection projection) { - return modelObjects.map((object) -> toRest(object, projection)); + public Page toRestPage(List modelObjects, Pageable pageable, long total, Projection projection) { + List transformedList = new LinkedList<>(); + for (M modelObject : modelObjects) { + R transformedObject = toRest(modelObject, projection); + if (transformedObject != null) { + transformedList.add(transformedObject); + } + } + if (pageable == null) { + pageable = utils.getPageable(pageable); + } + return new PageImpl(transformedList, pageable, total); } + /** * Gets the converter supporting the given class as input. * @@ -177,6 +295,9 @@ public class ConverterService { * @return the fully converted resource, with all automatic links and embeds applied. */ public T toResource(RestModel restObject, Link... 
oldLinks) { + if (restObject == null) { + return null; + } T halResource = getResource(restObject); if (restObject instanceof RestAddressableModel) { utils.embedOrLinkClassLevelRels(halResource, oldLinks); @@ -286,21 +407,21 @@ public class ConverterService { // scan all resource classes and look for compatible rest classes (by naming convention), // creating a map of resource constructors keyed by rest class, for later use. ClassPathScanningCandidateComponentProvider provider = new ClassPathScanningCandidateComponentProvider(false); - provider.addIncludeFilter(new AssignableTypeFilter(Resource.class)); + provider.addIncludeFilter(new AssignableTypeFilter(EntityModel.class)); Set beanDefinitions = provider.findCandidateComponents( - HALResource.class.getPackage().getName().replaceAll("\\.", "/")); + HALResource.class.getPackage().getName().replaceAll("\\.", "/")); for (BeanDefinition beanDefinition : beanDefinitions) { String resourceClassName = beanDefinition.getBeanClassName(); String resourceClassSimpleName = resourceClassName.substring(resourceClassName.lastIndexOf(".") + 1); String restClassSimpleName = resourceClassSimpleName - .replaceAll("ResourceWrapper$", "RestWrapper") - .replaceAll("Resource$", "Rest"); + .replaceAll("ResourceWrapper$", "RestWrapper") + .replaceAll("Resource$", "Rest"); String restClassName = RestModel.class.getPackage().getName() + "." + restClassSimpleName; try { Class restClass = - (Class) Class.forName(restClassName); + (Class) Class.forName(restClassName); Class> resourceClass = - (Class>) Class.forName(resourceClassName); + (Class>) Class.forName(resourceClassName); Constructor compatibleConstructor = null; for (Constructor constructor : resourceClass.getDeclaredConstructors()) { if (constructor.getParameterCount() == 2 && constructor.getParameterTypes()[1] == Utils.class) { @@ -314,11 +435,11 @@ public class ConverterService { resourceConstructors.put(restClass, compatibleConstructor); } else { log.warn("Skipping registration of resource class " + resourceClassName - + "; compatible constructor not found"); + + "; compatible constructor not found"); } } catch (ClassNotFoundException e) { log.warn("Skipping registration of resource class " + resourceClassName - + "; rest class not found: " + restClassName); + + "; rest class not found: " + restClassName); } } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/DSpaceObjectConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/DSpaceObjectConverter.java index 7d3d89ad0c..9b44cea067 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/DSpaceObjectConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/DSpaceObjectConverter.java @@ -7,9 +7,23 @@ */ package org.dspace.app.rest.converter; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.app.rest.model.MetadataValueList; import org.dspace.app.rest.projection.Projection; +import org.dspace.app.rest.utils.ContextUtil; +import org.dspace.app.util.service.MetadataExposureService; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.DSpaceObject; +import org.dspace.content.MetadataField; +import org.dspace.content.MetadataValue; +import org.dspace.core.Context; +import org.dspace.services.RequestService; +import org.dspace.services.model.Request; import 
org.springframework.beans.factory.annotation.Autowired; /** @@ -21,11 +35,23 @@ import org.springframework.beans.factory.annotation.Autowired; * @author Andrea Bollini (andrea.bollini at 4science.it) */ public abstract class DSpaceObjectConverter implements DSpaceConverter { + .DSpaceObjectRest> implements DSpaceConverter { + + private static final Logger log = LogManager.getLogger(DSpaceObjectConverter.class); @Autowired ConverterService converter; + @Autowired + AuthorizeService authorizeService; + + @Autowired + MetadataExposureService metadataExposureService; + + @Autowired + RequestService requestService; + + @Override public R convert(M obj, Projection projection) { R resource = newInstance(); @@ -35,10 +61,54 @@ public abstract class DSpaceObjectConverter
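To make the effect of the reworked ConverterService concrete: toRest(...) now returns null when the resolved @PreAuthorize expression denies the current user, and both toRestPage(...) variants skip those null results, so a returned page only contains objects the caller is allowed to see. The fragment below is a sketch of how repository-style code relies on that behaviour; the class, package, and method names are hypothetical and the Projection is simply supplied by the caller.

    package org.dspace.app.rest.example;

    import java.util.List;

    import org.dspace.app.rest.converter.ConverterService;
    import org.dspace.app.rest.model.ItemRest;
    import org.dspace.app.rest.projection.Projection;
    import org.dspace.content.Item;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.data.domain.Page;
    import org.springframework.data.domain.Pageable;
    import org.springframework.stereotype.Component;

    /**
     * Hypothetical helper showing how callers benefit from the permission-aware conversion.
     */
    @Component
    public class VisibleItemPager {

        @Autowired
        private ConverterService converter;

        /**
         * Items the current user may not read are converted to null by toRest()
         * and dropped by toRestPage(), so the page only exposes readable items.
         */
        public Page<ItemRest> toVisiblePage(List<Item> items, Pageable pageable, Projection projection) {
            return converter.toRestPage(items, pageable, projection);
        }
    }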