Merge remote-tracking branch 'upstream/main' into Add-Range-CORS-header

This commit is contained in:
Yana De Pauw
2023-01-05 16:34:33 +01:00
1050 changed files with 79118 additions and 11678 deletions

View File

@@ -1,7 +1,7 @@
## References
_Add references/links to any related issues or PRs. These may include:_
* Fixes #[issue-number]
* Related to [REST Contract](https://github.com/DSpace/Rest7Contract)
* Fixes #`issue-number` (if this fixes an issue ticket)
* Related to DSpace/RestContract#`pr-number` (if a corresponding REST Contract PR exists)
## Description
Short summary of changes (1-2 sentences).
@@ -22,5 +22,7 @@ _This checklist provides a reminder of what we are going to look for when review
- [ ] My PR passes Checkstyle validation based on the [Code Style Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Style+Guide).
- [ ] My PR includes Javadoc for _all new (or modified) public methods and classes_. It also includes Javadoc for large or complex private methods.
- [ ] My PR passes all tests and includes new/updated Unit or Integration Tests based on the [Code Testing Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Testing+Guide).
- [ ] If my PR includes new, third-party dependencies (in any `pom.xml`), I've made sure their licenses align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation.
- [ ] If my PR modifies the REST API, I've linked to the REST Contract page (or open PR) related to this change.
- [ ] If my PR includes new libraries/dependencies (in any `pom.xml`), I've made sure their licenses align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation.
- [ ] If my PR modifies REST API endpoints, I've opened a separate [REST Contract](https://github.com/DSpace/RestContract/blob/main/README.md) PR related to this change.
- [ ] If my PR includes new configurations, I've provided basic technical documentation in the PR itself.
- [ ] If my PR fixes an issue ticket, I've [linked them together](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue).

View File

@@ -6,14 +6,15 @@ name: Build
# Run this Build for all pushes / PRs to current branch
on: [push, pull_request]
permissions:
contents: read # to fetch code (actions/checkout)
jobs:
tests:
runs-on: ubuntu-latest
env:
# Give Maven 1GB of memory to work with
# Suppress all Maven "downloading" messages in logs (see https://stackoverflow.com/a/35653426)
# This also slightly speeds builds, as there is less logging
MAVEN_OPTS: "-Xmx1024M -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn"
MAVEN_OPTS: "-Xmx1024M"
strategy:
# Create a matrix of two separate configurations for Unit vs Integration Tests
# This will ensure those tasks are run in parallel
@@ -44,18 +45,18 @@ jobs:
steps:
# https://github.com/actions/checkout
- name: Checkout codebase
uses: actions/checkout@v2
uses: actions/checkout@v3
# https://github.com/actions/setup-java
- name: Install JDK ${{ matrix.java }}
uses: actions/setup-java@v2
uses: actions/setup-java@v3
with:
java-version: ${{ matrix.java }}
distribution: 'temurin'
# https://github.com/actions/cache
- name: Cache Maven dependencies
uses: actions/cache@v2
uses: actions/cache@v3
with:
# Cache entire ~/.m2/repository
path: ~/.m2/repository
@@ -67,17 +68,17 @@ jobs:
- name: Run Maven ${{ matrix.type }}
env:
TEST_FLAGS: ${{ matrix.mvnflags }}
run: mvn install -B -V -P-assembly -Pcoverage-report $TEST_FLAGS
run: mvn --no-transfer-progress -V install -P-assembly -Pcoverage-report $TEST_FLAGS
# If previous step failed, save results of tests to downloadable artifact for this job
# (This artifact is downloadable at the bottom of any job's summary page)
- name: Upload Results of ${{ matrix.type }} to Artifact
if: ${{ failure() }}
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.type }} results
path: ${{ matrix.resultsdir }}
# https://github.com/codecov/codecov-action
- name: Upload coverage to Codecov.io
uses: codecov/codecov-action@v2
uses: codecov/codecov-action@v3

59
.github/workflows/codescan.yml vendored Normal file
View File

@@ -0,0 +1,59 @@
# DSpace CodeQL code scanning configuration for GitHub
# https://docs.github.com/en/code-security/code-scanning
#
# NOTE: Code scanning must be run separate from our default build.yml
# because CodeQL requires a fresh build with all tests *disabled*.
name: "Code Scanning"
# Run this code scan for all pushes / PRs to main branch. Also run once a week.
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
# Don't run if PR is only updating static documentation
paths-ignore:
- '**/*.md'
- '**/*.txt'
schedule:
- cron: "37 0 * * 1"
jobs:
analyze:
name: Analyze Code
runs-on: ubuntu-latest
# Limit permissions of this GitHub action. Can only write to security-events
permissions:
actions: read
contents: read
security-events: write
steps:
# https://github.com/actions/checkout
- name: Checkout repository
uses: actions/checkout@v3
# https://github.com/actions/setup-java
- name: Install JDK
uses: actions/setup-java@v3
with:
java-version: 11
distribution: 'temurin'
# Initializes the CodeQL tools for scanning.
# https://github.com/github/codeql-action
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
# Codescan Javascript as well since a few JS files exist in REST API's interface
languages: java, javascript
# Autobuild attempts to build any compiled languages
# NOTE: Based on testing, this autobuild process works well for DSpace. A custom
# DSpace build w/caching (like in build.yml) was about the same speed as autobuild.
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# Perform GitHub Code Scanning.
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2

View File

@@ -12,6 +12,9 @@ on:
- 'dspace-**'
pull_request:
permissions:
contents: read # to fetch code (actions/checkout)
jobs:
docker:
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
@@ -31,21 +34,30 @@ jobs:
# We turn off 'latest' tag by default.
TAGS_FLAVOR: |
latest=false
# Architectures / Platforms for which we will build Docker images
# If this is a PR, we ONLY build for AMD64. For PRs we only do a sanity check test to ensure Docker builds work.
# If this is NOT a PR (e.g. a tag or merge commit), also build for ARM64. NOTE: The ARM64 build takes MUCH
# longer (around 45mins or so) which is why we only run it when pushing a new Docker image.
PLATFORMS: linux/amd64${{ github.event_name != 'pull_request' && ', linux/arm64' || '' }}
steps:
# https://github.com/actions/checkout
- name: Checkout codebase
uses: actions/checkout@v2
uses: actions/checkout@v3
# https://github.com/docker/setup-buildx-action
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v1
uses: docker/setup-buildx-action@v2
# https://github.com/docker/setup-qemu-action
- name: Set up QEMU emulation to build for multiple architectures
uses: docker/setup-qemu-action@v2
# https://github.com/docker/login-action
- name: Login to DockerHub
# Only login if not a PR, as PRs only trigger a Docker build and not a push
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
@@ -57,7 +69,7 @@ jobs:
# Get Metadata for docker_build_deps step below
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-dependencies' image
id: meta_build_deps
uses: docker/metadata-action@v3
uses: docker/metadata-action@v4
with:
images: dspace/dspace-dependencies
tags: ${{ env.IMAGE_TAGS }}
@@ -66,10 +78,11 @@ jobs:
# https://github.com/docker/build-push-action
- name: Build and push 'dspace-dependencies' image
id: docker_build_deps
uses: docker/build-push-action@v2
uses: docker/build-push-action@v3
with:
context: .
file: ./Dockerfile.dependencies
platforms: ${{ env.PLATFORMS }}
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
# but we ONLY do an image push to DockerHub if it's NOT a PR
push: ${{ github.event_name != 'pull_request' }}
@@ -83,7 +96,7 @@ jobs:
# Get Metadata for docker_build step below
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace' image
id: meta_build
uses: docker/metadata-action@v3
uses: docker/metadata-action@v4
with:
images: dspace/dspace
tags: ${{ env.IMAGE_TAGS }}
@@ -91,10 +104,11 @@ jobs:
- name: Build and push 'dspace' image
id: docker_build
uses: docker/build-push-action@v2
uses: docker/build-push-action@v3
with:
context: .
file: ./Dockerfile
platforms: ${{ env.PLATFORMS }}
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
# but we ONLY do an image push to DockerHub if it's NOT a PR
push: ${{ github.event_name != 'pull_request' }}
@@ -108,7 +122,7 @@ jobs:
# Get Metadata for docker_build_test step below
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-test' image
id: meta_build_test
uses: docker/metadata-action@v3
uses: docker/metadata-action@v4
with:
images: dspace/dspace
tags: ${{ env.IMAGE_TAGS }}
@@ -119,10 +133,11 @@ jobs:
- name: Build and push 'dspace-test' image
id: docker_build_test
uses: docker/build-push-action@v2
uses: docker/build-push-action@v3
with:
context: .
file: ./Dockerfile.test
platforms: ${{ env.PLATFORMS }}
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
# but we ONLY do an image push to DockerHub if it's NOT a PR
push: ${{ github.event_name != 'pull_request' }}
@@ -136,7 +151,7 @@ jobs:
# Get Metadata for docker_build_test step below
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-cli' image
id: meta_build_cli
uses: docker/metadata-action@v3
uses: docker/metadata-action@v4
with:
images: dspace/dspace-cli
tags: ${{ env.IMAGE_TAGS }}
@@ -144,13 +159,14 @@ jobs:
- name: Build and push 'dspace-cli' image
id: docker_build_cli
uses: docker/build-push-action@v2
uses: docker/build-push-action@v3
with:
context: .
file: ./Dockerfile.cli
platforms: ${{ env.PLATFORMS }}
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
# but we ONLY do an image push to DockerHub if it's NOT a PR
push: ${{ github.event_name != 'pull_request' }}
# Use tags / labels provided by 'docker/metadata-action' above
tags: ${{ steps.meta_build_cli.outputs.tags }}
labels: ${{ steps.meta_build_cli.outputs.labels }}
labels: ${{ steps.meta_build_cli.outputs.labels }}

View File

@@ -5,25 +5,22 @@ on:
issues:
types: [opened]
permissions: {}
jobs:
automation:
runs-on: ubuntu-latest
steps:
# Add the new issue to a project board, if it needs triage
# See https://github.com/marketplace/actions/create-project-card-action
- name: Add issue to project board
# See https://github.com/actions/add-to-project
- name: Add issue to triage board
# Only add to project board if issue is flagged as "needs triage" or has no labels
# NOTE: By default we flag new issues as "needs triage" in our issue template
if: (contains(github.event.issue.labels.*.name, 'needs triage') || join(github.event.issue.labels.*.name) == '')
uses: technote-space/create-project-card-action@v1
uses: actions/add-to-project@v0.3.0
# Note, the authentication token below is an ORG level Secret.
# It must be created/recreated manually via a personal access token with "public_repo" and "admin:org" permissions
# It must be created/recreated manually via a personal access token with admin:org, project, public_repo permissions
# See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token#permissions-for-the-github_token
# This is necessary because the "DSpace Backlog" project is an org level project (i.e. not repo specific)
with:
GITHUB_TOKEN: ${{ secrets.ORG_PROJECT_TOKEN }}
PROJECT: DSpace Backlog
COLUMN: Triage
CHECK_ORG_PROJECT: true
# Ignore errors.
continue-on-error: true
github-token: ${{ secrets.TRIAGE_PROJECT_TOKEN }}
project-url: https://github.com/orgs/DSpace/projects/24

View File

@@ -5,21 +5,32 @@ name: Check for merge conflicts
# NOTE: This means merge conflicts are only checked for when a PR is merged to main.
on:
push:
branches:
- main
branches: [ main ]
# So that the `conflict_label_name` is removed if conflicts are resolved,
# we allow this to run for `pull_request_target` so that github secrets are available.
pull_request_target:
types: [ synchronize ]
permissions: {}
jobs:
triage:
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
if: github.repository == 'dspace/dspace'
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
# See: https://github.com/mschilde/auto-label-merge-conflicts/
# See: https://github.com/prince-chrismc/label-merge-conflicts-action
- name: Auto-label PRs with merge conflicts
uses: mschilde/auto-label-merge-conflicts@v2.0
uses: prince-chrismc/label-merge-conflicts-action@v2
# Add "merge conflict" label if a merge conflict is detected. Remove it when resolved.
# Note, the authentication token is created automatically
# See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token
with:
CONFLICT_LABEL_NAME: 'merge conflict'
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# Ignore errors
continue-on-error: true
conflict_label_name: 'merge conflict'
github_token: ${{ secrets.GITHUB_TOKEN }}
conflict_comment: |
Hi @${author},
Conflicts have been detected against the base branch.
Please [resolve these conflicts](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/about-merge-conflicts) as soon as you can. Thanks!

45
CONTRIBUTING.md Normal file
View File

@@ -0,0 +1,45 @@
# How to Contribute
DSpace is a community-built and community-supported project. We do not have a centralized development or support team, but we have a dedicated group of volunteers who help us improve the software, documentation, resources, etc.
* [Contribute new code via a Pull Request](#contribute-new-code-via-a-pull-request)
* [Contribute documentation](#contribute-documentation)
* [Help others on mailing lists or Slack](#help-others-on-mailing-lists-or-slack)
* [Join a working or interest group](#join-a-working-or-interest-group)
## Contribute new code via a Pull Request
We accept [GitHub Pull Requests (PRs)](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork) at any time from anyone.
Contributors to each release are recognized in our [Release Notes](https://wiki.lyrasis.org/display/DSDOC7x/Release+Notes).
Code Contribution Checklist
- [ ] PRs _should_ be smaller in size (ideally less than 1,000 lines of code, not including comments & tests)
- [ ] PRs **must** pass Checkstyle validation based on our [Code Style Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Style+Guide).
- [ ] PRs **must** include Javadoc for _all new/modified public methods and classes_. Larger private methods should also have Javadoc.
- [ ] PRs **must** pass all automated tests and include new/updated Unit or Integration tests based on our [Code Testing Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Testing+Guide).
- [ ] If a PR includes new libraries/dependencies (in any `pom.xml`), then their software licenses **must** align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation.
- [ ] Basic technical documentation _should_ be provided for any new features or changes to the REST API. REST API changes should be documented in our [REST Contract](https://github.com/DSpace/RestContract).
- [ ] If a PR fixes an issue ticket, please [link them together](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue).
Additional details on the code contribution process can be found in our [Code Contribution Guidelines](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines)
## Contribute documentation
DSpace Documentation is a collaborative effort in a shared Wiki. The latest documentation is at https://wiki.lyrasis.org/display/DSDOC7x
If you find areas of the DSpace Documentation which you wish to improve, please request a Wiki account by emailing wikihelp@lyrasis.org.
Once you have an account setup, contact @tdonohue (via [Slack](https://wiki.lyrasis.org/display/DSPACE/Slack) or email) for access to edit our Documentation.
## Help others on mailing lists or Slack
DSpace has our own [Slack](https://wiki.lyrasis.org/display/DSPACE/Slack) community and [Mailing Lists](https://wiki.lyrasis.org/display/DSPACE/Mailing+Lists) where discussions take place and questions are answered.
Anyone is welcome to join and help others. We just ask you to follow our [Code of Conduct](https://www.lyrasis.org/about/Pages/Code-of-Conduct.aspx) (adopted via LYRASIS).
## Join a working or interest group
Most of the work in building/improving DSpace comes via [Working Groups](https://wiki.lyrasis.org/display/DSPACE/DSpace+Working+Groups) or [Interest Groups](https://wiki.lyrasis.org/display/DSPACE/DSpace+Interest+Groups).
All working/interest groups are open to anyone to join and participate. A few key groups to be aware of include:
* [DSpace 7 Working Group](https://wiki.lyrasis.org/display/DSPACE/DSpace+7+Working+Group) - This is the main (mostly volunteer) development team. We meet weekly to review our current development [project board](https://github.com/orgs/DSpace/projects), assigning tickets and/or PRs.
* [DSpace Community Advisory Team (DCAT)](https://wiki.lyrasis.org/display/cmtygp/DSpace+Community+Advisory+Team) - This is an interest group for repository managers/administrators. We meet monthly to discuss DSpace, share tips & provide feedback to developers.

View File

@@ -20,7 +20,7 @@ USER dspace
ADD --chown=dspace . /app/
# Build DSpace (note: this build doesn't include the optional, deprecated "dspace-rest" webapp)
# Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small
RUN mvn package && \
RUN mvn --no-transfer-progress package && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean

View File

@@ -19,7 +19,7 @@ USER dspace
# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
# Build DSpace. Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small
RUN mvn package && \
RUN mvn --no-transfer-progress package && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean

View File

@@ -22,7 +22,7 @@ USER dspace
ADD --chown=dspace . /app/
# Build DSpace (INCLUDING the optional, deprecated "dspace-rest" webapp)
# Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small
RUN mvn package -Pdspace-rest && \
RUN mvn --no-transfer-progress package -Pdspace-rest && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
@@ -58,9 +58,11 @@ COPY --from=ant_build /dspace $DSPACE_INSTALL
# NOTE: secretRequired="false" should only be used when AJP is NOT accessible from an external network. But, secretRequired="true" isn't supported by mod_proxy_ajp until Apache 2.5
RUN sed -i '/Service name="Catalina".*/a \\n <Connector protocol="AJP/1.3" port="8009" address="0.0.0.0" redirectPort="8443" URIEncoding="UTF-8" secretRequired="false" />' $TOMCAT_INSTALL/conf/server.xml
# Expose Tomcat port and AJP port
EXPOSE 8080 8009
EXPOSE 8080 8009 8000
# Give java extra memory (2GB)
ENV JAVA_OPTS=-Xmx2000m
# Set up debugging
ENV CATALINA_OPTS=-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=*:8000
# Link the DSpace 'server' webapp into Tomcat's webapps directory.
# This ensures that when we start Tomcat, it runs from /server path (e.g. http://localhost:8080/server/)

View File

@@ -21,28 +21,29 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
Apache Software License, Version 2.0:
* Ant-Contrib Tasks (ant-contrib:ant-contrib:1.0b3 - http://ant-contrib.sourceforge.net)
* AWS SDK for Java - Core (com.amazonaws:aws-java-sdk-core:1.12.116 - https://aws.amazon.com/sdkforjava)
* AWS Java SDK for AWS KMS (com.amazonaws:aws-java-sdk-kms:1.12.116 - https://aws.amazon.com/sdkforjava)
* AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.12.116 - https://aws.amazon.com/sdkforjava)
* JMES Path Query library (com.amazonaws:jmespath-java:1.12.116 - https://aws.amazon.com/sdkforjava)
* jcommander (com.beust:jcommander:1.78 - https://jcommander.org)
* AWS SDK for Java - Core (com.amazonaws:aws-java-sdk-core:1.12.261 - https://aws.amazon.com/sdkforjava)
* AWS Java SDK for AWS KMS (com.amazonaws:aws-java-sdk-kms:1.12.261 - https://aws.amazon.com/sdkforjava)
* AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.12.261 - https://aws.amazon.com/sdkforjava)
* JMES Path Query library (com.amazonaws:jmespath-java:1.12.261 - https://aws.amazon.com/sdkforjava)
* HPPC Collections (com.carrotsearch:hppc:0.8.1 - http://labs.carrotsearch.com/hppc.html/hppc)
* parso (com.epam:parso:2.0.11 - https://github.com/epam/parso)
* com.drewnoakes:metadata-extractor (com.drewnoakes:metadata-extractor:2.16.0 - https://drewnoakes.com/code/exif/)
* parso (com.epam:parso:2.0.14 - https://github.com/epam/parso)
* Esri Geometry API for Java (com.esri.geometry:esri-geometry-api:2.2.0 - https://github.com/Esri/geometry-api-java)
* ClassMate (com.fasterxml:classmate:1.3.0 - http://github.com/cowtowncoder/java-classmate)
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.12.3 - http://github.com/FasterXML/jackson)
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.12.3 - https://github.com/FasterXML/jackson-core)
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.12.3 - http://github.com/FasterXML/jackson)
* Jackson dataformat: CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.12.3 - http://github.com/FasterXML/jackson-dataformats-binary)
* Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.11.2 - http://github.com/FasterXML/jackson-dataformats-binary)
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.12.6 - http://github.com/FasterXML/jackson)
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.12.6 - https://github.com/FasterXML/jackson-core)
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.12.6.1 - http://github.com/FasterXML/jackson)
* Jackson dataformat: CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.12.6 - http://github.com/FasterXML/jackson-dataformats-binary)
* Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.12.3 - http://github.com/FasterXML/jackson-dataformats-binary)
* Jackson-dataformat-YAML (com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1 - https://github.com/FasterXML/jackson-dataformats-text)
* Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.10.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8)
* Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.10.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310)
* Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8)
* Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.11.1 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310)
* Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.10.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names)
* Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310)
* Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names)
* Java UUID Generator (com.fasterxml.uuid:java-uuid-generator:4.0.1 - https://github.com/cowtowncoder/java-uuid-generator)
* Woodstox (com.fasterxml.woodstox:woodstox-core:5.0.3 - https://github.com/FasterXML/woodstox)
* Woodstox (com.fasterxml.woodstox:woodstox-core:6.2.4 - https://github.com/FasterXML/woodstox)
* zjsonpatch (com.flipkart.zjsonpatch:zjsonpatch:0.4.6 - https://github.com/flipkart-incubator/zjsonpatch/)
* Caffeine cache (com.github.ben-manes.caffeine:caffeine:2.8.4 - https://github.com/ben-manes/caffeine)
* Caffeine cache (com.github.ben-manes.caffeine:caffeine:2.9.2 - https://github.com/ben-manes/caffeine)
* btf (com.github.java-json-tools:btf:1.3 - https://github.com/java-json-tools/btf)
* jackson-coreutils (com.github.java-json-tools:jackson-coreutils:2.0 - https://github.com/java-json-tools/jackson-coreutils)
* jackson-coreutils-equivalence (com.github.java-json-tools:jackson-coreutils-equivalence:1.0 - https://github.com/java-json-tools/jackson-coreutils)
@@ -50,41 +51,40 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* json-schema-validator (com.github.java-json-tools:json-schema-validator:2.2.14 - https://github.com/java-json-tools/json-schema-validator)
* msg-simple (com.github.java-json-tools:msg-simple:1.2 - https://github.com/java-json-tools/msg-simple)
* uri-template (com.github.java-json-tools:uri-template:0.10 - https://github.com/java-json-tools/uri-template)
* Open JSON (com.github.openjson:openjson:1.0.12 - https://github.com/openjson/openjson)
* JCIP Annotations under Apache License (com.github.stephenc.jcip:jcip-annotations:1.0-1 - http://stephenc.github.com/jcip-annotations)
* Google APIs Client Library for Java (com.google.api-client:google-api-client:1.23.0 - https://github.com/google/google-api-java-client/google-api-client)
* Google Analytics API v3-rev145-1.23.0 (com.google.apis:google-api-services-analytics:v3-rev145-1.23.0 - http://nexus.sonatype.org/oss-repository-hosting.html/google-api-services-analytics)
* FindBugs-jsr305 (com.google.code.findbugs:jsr305:3.0.1 - http://findbugs.sourceforge.net/)
* Gson (com.google.code.gson:gson:2.8.6 - https://github.com/google/gson/gson)
* error-prone annotations (com.google.errorprone:error_prone_annotations:2.3.4 - http://nexus.sonatype.org/oss-repository-hosting.html/error_prone_parent/error_prone_annotations)
* Gson (com.google.code.gson:gson:2.9.0 - https://github.com/google/gson/gson)
* error-prone annotations (com.google.errorprone:error_prone_annotations:2.7.1 - http://nexus.sonatype.org/oss-repository-hosting.html/error_prone_parent/error_prone_annotations)
* Guava InternalFutureFailureAccess and InternalFutures (com.google.guava:failureaccess:1.0.1 - https://github.com/google/guava/failureaccess)
* Guava: Google Core Libraries for Java (com.google.guava:guava:30.0-jre - https://github.com/google/guava/guava)
* Guava: Google Core Libraries for Java (com.google.guava:guava:31.0.1-jre - https://github.com/google/guava)
* Guava: Google Core Libraries for Java (JDK5 Backport) (com.google.guava:guava-jdk5:17.0 - http://code.google.com/p/guava-libraries/guava-jdk5)
* Guava ListenableFuture only (com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava - https://github.com/google/guava/listenablefuture)
* Google HTTP Client Library for Java (com.google.http-client:google-http-client:1.23.0 - https://github.com/google/google-http-java-client/google-http-client)
* GSON extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-gson:1.41.7 - https://github.com/googleapis/google-http-java-client/google-http-client-gson)
* Jackson 2 extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-jackson2:1.23.0 - https://github.com/google/google-http-java-client/google-http-client-jackson2)
* J2ObjC Annotations (com.google.j2objc:j2objc-annotations:1.3 - https://github.com/google/j2objc/)
* Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.32.1 - https://github.com/googleapis/google-oauth-java-client/google-oauth-client)
* Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.33.3 - https://github.com/googleapis/google-oauth-java-client/google-oauth-client)
* ConcurrentLinkedHashMap (com.googlecode.concurrentlinkedhashmap:concurrentlinkedhashmap-lru:1.4.2 - http://code.google.com/p/concurrentlinkedhashmap)
* JSON.simple (com.googlecode.json-simple:json-simple:1.1.1 - http://code.google.com/p/json-simple/)
* libphonenumber (com.googlecode.libphonenumber:libphonenumber:8.11.1 - https://github.com/google/libphonenumber/)
* Jackcess (com.healthmarketscience.jackcess:jackcess:3.0.1 - https://jackcess.sourceforge.io)
* Jackcess Encrypt (com.healthmarketscience.jackcess:jackcess-encrypt:3.0.0 - http://jackcessencrypt.sf.net)
* project ':json-path' (com.jayway.jsonpath:json-path:2.4.0 - https://github.com/jayway/JsonPath)
* project ':json-path-assert' (com.jayway.jsonpath:json-path-assert:2.4.0 - https://github.com/jayway/JsonPath)
* Jackcess (com.healthmarketscience.jackcess:jackcess:4.0.1 - https://jackcess.sourceforge.io)
* Jackcess Encrypt (com.healthmarketscience.jackcess:jackcess-encrypt:4.0.1 - http://jackcessencrypt.sf.net)
* project ':json-path' (com.jayway.jsonpath:json-path:2.6.0 - https://github.com/jayway/JsonPath)
* project ':json-path-assert' (com.jayway.jsonpath:json-path-assert:2.6.0 - https://github.com/jayway/JsonPath)
* Disruptor Framework (com.lmax:disruptor:3.4.2 - http://lmax-exchange.github.com/disruptor)
* builder-commons (com.lyncode:builder-commons:1.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/builder-commons)
* MaxMind DB Reader (com.maxmind.db:maxmind-db:1.2.2 - http://dev.maxmind.com/)
* MaxMind GeoIP2 API (com.maxmind.geoip2:geoip2:2.11.0 - http://dev.maxmind.com/geoip/geoip2/web-services)
* Nimbus JOSE+JWT (com.nimbusds:nimbus-jose-jwt:7.9 - https://bitbucket.org/connect2id/nimbus-jose-jwt)
* opencsv (com.opencsv:opencsv:5.2 - http://opencsv.sf.net)
* opencsv (com.opencsv:opencsv:5.6 - http://opencsv.sf.net)
* java-libpst (com.pff:java-libpst:0.9.3 - https://github.com/rjohnsondev/java-libpst)
* rome (com.rometools:rome:1.12.2 - http://rometools.com/rome)
* rome-utils (com.rometools:rome-utils:1.12.2 - http://rometools.com/rome-utils)
* rome (com.rometools:rome:1.18.0 - http://rometools.com/rome)
* rome-modules (com.rometools:rome-modules:1.18.0 - http://rometools.com/rome-modules)
* rome-utils (com.rometools:rome-utils:1.18.0 - http://rometools.com/rome-utils)
* fastinfoset (com.sun.xml.fastinfoset:FastInfoset:1.2.15 - http://fi.java.net)
* T-Digest (com.tdunning:t-digest:3.1 - https://github.com/tdunning/t-digest)
* JSON library from Android SDK (com.vaadin.external.google:android-json:0.0.20131108.vaadin1 - http://developer.android.com/sdk)
* HikariCP (com.zaxxer:HikariCP-java7:2.4.13 - https://github.com/brettwooldridge/HikariCP)
* SparseBitSet (com.zaxxer:SparseBitSet:1.2 - https://github.com/brettwooldridge/SparseBitSet)
* Apache Commons BeanUtils (commons-beanutils:commons-beanutils:1.9.4 - https://commons.apache.org/proper/commons-beanutils/)
* Apache Commons CLI (commons-cli:commons-cli:1.4 - http://commons.apache.org/proper/commons-cli/)
* Apache Commons Validator (commons-validator:commons-validator:1.5.0 - http://commons.apache.org/proper/commons-validator/)
* GeoJson POJOs for Jackson (de.grundid.opendatalab:geojson-jackson:1.14 - https://github.com/opendatalab-de/geojson-jackson)
* Boilerpipe -- Boilerplate Removal and Fulltext Extraction from HTML pages (de.l3s.boilerpipe:boilerpipe:1.1.0 - http://code.google.com/p/boilerpipe/)
* SentimentAnalysisParser (edu.usc.ir:sentiment-analysis-parser:0.1 - https://github.com/USCDataScience/SentimentAnalysisParser)
* OpenAIRE Funders Model (eu.openaire:funders-model:2.0.0 - https://api.openaire.eu)
* Metrics Core (io.dropwizard.metrics:metrics-core:4.1.5 - https://metrics.dropwizard.io/metrics-core)
* Graphite Integration for Metrics (io.dropwizard.metrics:metrics-graphite:4.1.5 - https://metrics.dropwizard.io/metrics-graphite)
* Metrics Integration for Jetty 9.3 and higher (io.dropwizard.metrics:metrics-jetty9:4.1.5 - https://metrics.dropwizard.io/metrics-jetty9)
* Metrics Integration with JMX (io.dropwizard.metrics:metrics-jmx:4.1.5 - https://metrics.dropwizard.io/metrics-jmx)
* JVM Integration for Metrics (io.dropwizard.metrics:metrics-jvm:4.1.5 - https://metrics.dropwizard.io/metrics-jvm)
* Netty (io.netty:netty:3.10.6.Final - http://netty.io/)
* Netty/Buffer (io.netty:netty-buffer:4.1.50.Final - https://netty.io/netty-buffer/)
* micrometer-core (io.micrometer:micrometer-core:1.8.6 - https://github.com/micrometer-metrics/micrometer)
* Netty/Buffer (io.netty:netty-buffer:4.1.68.Final - https://netty.io/netty-buffer/)
* Netty/Codec (io.netty:netty-codec:4.1.50.Final - https://netty.io/netty-codec/)
* Netty/Codec (io.netty:netty-codec:4.1.68.Final - https://netty.io/netty-codec/)
* Netty/Codec/HTTP (io.netty:netty-codec-http:4.1.53.Final - https://netty.io/netty-codec-http/)
* Netty/Codec/Socks (io.netty:netty-codec-socks:4.1.53.Final - https://netty.io/netty-codec-socks/)
* Netty/Common (io.netty:netty-common:4.1.50.Final - https://netty.io/netty-common/)
* Netty/Common (io.netty:netty-common:4.1.68.Final - https://netty.io/netty-common/)
* Netty/Handler (io.netty:netty-handler:4.1.50.Final - https://netty.io/netty-handler/)
* Netty/Handler (io.netty:netty-handler:4.1.68.Final - https://netty.io/netty-handler/)
* Netty/Handler/Proxy (io.netty:netty-handler-proxy:4.1.53.Final - https://netty.io/netty-handler-proxy/)
* Netty/Resolver (io.netty:netty-resolver:4.1.50.Final - https://netty.io/netty-resolver/)
* Netty/Transport (io.netty:netty-transport:4.1.50.Final - https://netty.io/netty-transport/)
* Netty/Resolver (io.netty:netty-resolver:4.1.68.Final - https://netty.io/netty-resolver/)
* Netty/Transport (io.netty:netty-transport:4.1.68.Final - https://netty.io/netty-transport/)
* Netty/Transport/Native/Epoll (io.netty:netty-transport-native-epoll:4.1.50.Final - https://netty.io/netty-transport-native-epoll/)
* Netty/Transport/Native/Unix/Common (io.netty:netty-transport-native-unix-common:4.1.50.Final - https://netty.io/netty-transport-native-unix-common/)
* Netty/Transport/Native/Epoll (io.netty:netty-transport-native-epoll:4.1.68.Final - https://netty.io/netty-transport-native-epoll/)
* Netty/Transport/Native/Unix/Common (io.netty:netty-transport-native-unix-common:4.1.68.Final - https://netty.io/netty-transport-native-unix-common/)
* OpenTracing API (io.opentracing:opentracing-api:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-api)
* OpenTracing-noop (io.opentracing:opentracing-noop:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-noop)
* OpenTracing-util (io.opentracing:opentracing-util:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-util)
* Byte Buddy (without dependencies) (net.bytebuddy:byte-buddy:1.11.13 - https://bytebuddy.net/byte-buddy)
* Byte Buddy agent (net.bytebuddy:byte-buddy-agent:1.11.13 - https://bytebuddy.net/byte-buddy-agent)
* eigenbase-properties (net.hydromatic:eigenbase-properties:1.1.5 - http://github.com/julianhyde/eigenbase-properties)
* Java Native Access (net.java.dev.jna:jna:5.5.0 - https://github.com/java-native-access/jna)
* json-unit-core (net.javacrumbs.json-unit:json-unit-core:2.19.0 - https://github.com/lukas-krecan/JsonUnit/json-unit-core)
* "Java Concurrency in Practice" book annotations (net.jcip:jcip-annotations:1.0 - http://jcip.net/)
* ASM based accessors helper used by json-smart (net.minidev:accessors-smart:1.2 - http://www.minidev.net/)
* ASM based accessors helper used by json-smart (net.minidev:accessors-smart:2.4.7 - https://urielch.github.io/)
* JSON Small and Fast Parser (net.minidev:json-smart:2.3 - http://www.minidev.net/)
* ehcache (net.sf.ehcache:ehcache:2.10.6 - http://ehcache.org)
* Ehcache Core (net.sf.ehcache:ehcache-core:2.6.11 - http://ehcache.org)
* JSON Small and Fast Parser (net.minidev:json-smart:2.4.7 - https://urielch.github.io/)
* Abdera Core (org.apache.abdera:abdera-core:1.1.3 - http://abdera.apache.org/abdera-core)
* I18N Libraries (org.apache.abdera:abdera-i18n:1.1.3 - http://abdera.apache.org)
* Apache Ant Core (org.apache.ant:ant:1.10.11 - https://ant.apache.org/)
* Apache Ant Launcher (org.apache.ant:ant-launcher:1.10.11 - https://ant.apache.org/)
* Apache Commons BCEL (org.apache.bcel:bcel:6.4.0 - https://commons.apache.org/proper/commons-bcel)
* Calcite Core (org.apache.calcite:calcite-core:1.18.0 - https://calcite.apache.org/calcite-core)
* Calcite Linq4j (org.apache.calcite:calcite-linq4j:1.18.0 - https://calcite.apache.org/calcite-linq4j)
* Apache Calcite Avatica (org.apache.calcite.avatica:avatica-core:1.13.0 - https://calcite.apache.org/avatica/avatica-core)
* Calcite Core (org.apache.calcite:calcite-core:1.27.0 - https://calcite.apache.org)
* Calcite Linq4j (org.apache.calcite:calcite-linq4j:1.27.0 - https://calcite.apache.org)
* Apache Calcite Avatica (org.apache.calcite.avatica:avatica-core:1.18.0 - https://calcite.apache.org/avatica)
* Apache Commons Collections (org.apache.commons:commons-collections4:4.1 - http://commons.apache.org/proper/commons-collections/)
* Apache Commons Compress (org.apache.commons:commons-compress:1.20 - https://commons.apache.org/proper/commons-compress/)
* Apache Commons Configuration (org.apache.commons:commons-configuration2:2.7 - https://commons.apache.org/proper/commons-configuration/)
* Apache Commons CSV (org.apache.commons:commons-csv:1.8 - https://commons.apache.org/proper/commons-csv/)
* Apache Commons Compress (org.apache.commons:commons-compress:1.21 - https://commons.apache.org/proper/commons-compress/)
* Apache Commons Configuration (org.apache.commons:commons-configuration2:2.8.0 - https://commons.apache.org/proper/commons-configuration/)
* Apache Commons CSV (org.apache.commons:commons-csv:1.9.0 - https://commons.apache.org/proper/commons-csv/)
* Apache Commons DBCP (org.apache.commons:commons-dbcp2:2.8.0 - https://commons.apache.org/dbcp/)
* Apache Commons Exec (org.apache.commons:commons-exec:1.3 - http://commons.apache.org/proper/commons-exec/)
* Apache Commons Lang (org.apache.commons:commons-lang3:3.7 - http://commons.apache.org/proper/commons-lang/)
* Apache Commons Lang (org.apache.commons:commons-lang3:3.12.0 - https://commons.apache.org/proper/commons-lang/)
* Apache Commons Math (org.apache.commons:commons-math3:3.6.1 - http://commons.apache.org/proper/commons-math/)
* Apache Commons Pool (org.apache.commons:commons-pool2:2.9.0 - https://commons.apache.org/proper/commons-pool/)
* Apache Commons Text (org.apache.commons:commons-text:1.8 - https://commons.apache.org/proper/commons-text)
* Apache Commons Text (org.apache.commons:commons-text:1.9 - https://commons.apache.org/proper/commons-text)
* Curator Client (org.apache.curator:curator-client:2.13.0 - http://curator.apache.org/curator-client)
* Curator Framework (org.apache.curator:curator-framework:2.13.0 - http://curator.apache.org/curator-framework)
* Curator Recipes (org.apache.curator:curator-recipes:2.13.0 - http://curator.apache.org/curator-recipes)
* Apache CXF Core (org.apache.cxf:cxf-core:3.3.6 - https://cxf.apache.org)
* Apache CXF Runtime JAX-RS Frontend (org.apache.cxf:cxf-rt-frontend-jaxrs:3.3.6 - https://cxf.apache.org)
* Apache CXF JAX-RS Client (org.apache.cxf:cxf-rt-rs-client:3.3.6 - https://cxf.apache.org)
* Apache CXF Runtime Security functionality (org.apache.cxf:cxf-rt-security:3.3.6 - https://cxf.apache.org)
* Apache CXF Runtime HTTP Transport (org.apache.cxf:cxf-rt-transports-http:3.3.6 - https://cxf.apache.org)
* JTA 1.1 (org.apache.geronimo.specs:geronimo-jta_1.1_spec:1.1.1 - http://geronimo.apache.org/specs/geronimo-jta_1.1_spec)
* Web Services Metadata 2.0 (org.apache.geronimo.specs:geronimo-ws-metadata_2.0_spec:1.1.3 - http://geronimo.apache.org/maven/specs/geronimo-ws-metadata_2.0_spec/1.1.3)
* Apache Hadoop Annotations (org.apache.hadoop:hadoop-annotations:3.2.0 - no url defined)
* Apache Hadoop Auth (org.apache.hadoop:hadoop-auth:3.2.0 - no url defined)
* Apache Hadoop Common (org.apache.hadoop:hadoop-common:3.2.0 - no url defined)
* Apache Hadoop HDFS Client (org.apache.hadoop:hadoop-hdfs-client:3.2.0 - no url defined)
* Apache Hadoop Annotations (org.apache.hadoop:hadoop-annotations:3.2.2 - no url defined)
* Apache Hadoop Auth (org.apache.hadoop:hadoop-auth:3.2.2 - no url defined)
* Apache Hadoop Common (org.apache.hadoop:hadoop-common:3.2.2 - no url defined)
* Apache Hadoop HDFS Client (org.apache.hadoop:hadoop-hdfs-client:3.2.2 - no url defined)
* htrace-core4 (org.apache.htrace:htrace-core4:4.1.0-incubating - http://incubator.apache.org/projects/htrace.html)
* Apache HttpClient (org.apache.httpcomponents:httpclient:4.5.13 - http://hc.apache.org/httpcomponents-client)
* Apache HttpClient Cache (org.apache.httpcomponents:httpclient-cache:4.2.6 - http://hc.apache.org/httpcomponents-client)
* Apache HttpCore (org.apache.httpcomponents:httpcore:4.4.4 - http://hc.apache.org/httpcomponents-core-ga)
* Apache HttpClient Mime (org.apache.httpcomponents:httpmime:4.5.12 - http://hc.apache.org/httpcomponents-client)
* Apache James :: Mime4j :: Core (org.apache.james:apache-mime4j-core:0.8.3 - http://james.apache.org/mime4j/apache-mime4j-core)
* Apache James :: Mime4j :: DOM (org.apache.james:apache-mime4j-dom:0.8.3 - http://james.apache.org/mime4j/apache-mime4j-dom)
* Apache HttpCore (org.apache.httpcomponents:httpcore:4.4.15 - http://hc.apache.org/httpcomponents-core-ga)
* Apache HttpClient Mime (org.apache.httpcomponents:httpmime:4.5.13 - http://hc.apache.org/httpcomponents-client)
* Apache James :: Mime4j :: Core (org.apache.james:apache-mime4j-core:0.8.4 - http://james.apache.org/mime4j/apache-mime4j-core)
* Apache James :: Mime4j :: DOM (org.apache.james:apache-mime4j-dom:0.8.4 - http://james.apache.org/mime4j/apache-mime4j-dom)
* Apache Jena - Libraries POM (org.apache.jena:apache-jena-libs:2.13.0 - http://jena.apache.org/apache-jena-libs/)
* Apache Jena - ARQ (SPARQL 1.1 Query Engine) (org.apache.jena:jena-arq:2.13.0 - http://jena.apache.org/jena-arq/)
* Apache Jena - Core (org.apache.jena:jena-core:2.13.0 - http://jena.apache.org/jena-core/)
* Apache Log4j API (org.apache.logging.log4j:log4j-api:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-api/)
* Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-core/)
* Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-jul/)
* Apache Log4j Layout for JSON template (org.apache.logging.log4j:log4j-layout-template-json:2.16.0 - https://logging.apache.org/log4j/2.x/log4j-layout-template-json/)
* Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/)
* Apache Log4j Web (org.apache.logging.log4j:log4j-web:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-web/)
* Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-common)
* Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-icu)
* Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji)
* Lucene Nori Korean Morphological Analyzer (org.apache.lucene:lucene-analyzers-nori:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-nori)
* Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic)
* Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn)
* Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-stempel)
* Lucene Memory (org.apache.lucene:lucene-backward-codecs:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-backward-codecs)
* Lucene Classification (org.apache.lucene:lucene-classification:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-classification)
* Lucene codecs (org.apache.lucene:lucene-codecs:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-codecs)
* Lucene Core (org.apache.lucene:lucene-core:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-core)
* Lucene Expressions (org.apache.lucene:lucene-expressions:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-expressions)
* Lucene Grouping (org.apache.lucene:lucene-grouping:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-grouping)
* Lucene Highlighter (org.apache.lucene:lucene-highlighter:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-highlighter)
* Lucene Join (org.apache.lucene:lucene-join:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-join)
* Lucene Memory (org.apache.lucene:lucene-memory:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-memory)
* Lucene Miscellaneous (org.apache.lucene:lucene-misc:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-misc)
* Lucene Queries (org.apache.lucene:lucene-queries:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-queries)
* Lucene QueryParsers (org.apache.lucene:lucene-queryparser:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-queryparser)
* Lucene Sandbox (org.apache.lucene:lucene-sandbox:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-sandbox)
* Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras)
* Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-spatial3d)
* Lucene Suggest (org.apache.lucene:lucene-suggest:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-suggest)
* Apache OpenNLP Tools (org.apache.opennlp:opennlp-tools:1.9.2 - https://www.apache.org/opennlp/opennlp-tools/)
* Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-common)
* Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-icu)
* Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji)
* Lucene Nori Korean Morphological Analyzer (org.apache.lucene:lucene-analyzers-nori:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-nori)
* Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic)
* Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn)
* Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-stempel)
* Lucene Memory (org.apache.lucene:lucene-backward-codecs:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-backward-codecs)
* Lucene Classification (org.apache.lucene:lucene-classification:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-classification)
* Lucene codecs (org.apache.lucene:lucene-codecs:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-codecs)
* Lucene Core (org.apache.lucene:lucene-core:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-core)
* Lucene Expressions (org.apache.lucene:lucene-expressions:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-expressions)
* Lucene Grouping (org.apache.lucene:lucene-grouping:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-grouping)
* Lucene Highlighter (org.apache.lucene:lucene-highlighter:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-highlighter)
* Lucene Join (org.apache.lucene:lucene-join:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-join)
* Lucene Memory (org.apache.lucene:lucene-memory:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-memory)
* Lucene Miscellaneous (org.apache.lucene:lucene-misc:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-misc)
* Lucene Queries (org.apache.lucene:lucene-queries:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-queries)
* Lucene QueryParsers (org.apache.lucene:lucene-queryparser:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-queryparser)
* Lucene Sandbox (org.apache.lucene:lucene-sandbox:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-sandbox)
* Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras)
* Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-spatial3d)
* Lucene Suggest (org.apache.lucene:lucene-suggest:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-suggest)
* Apache FontBox (org.apache.pdfbox:fontbox:2.0.24 - http://pdfbox.apache.org/)
* PDFBox JBIG2 ImageIO plugin (org.apache.pdfbox:jbig2-imageio:3.0.3 - https://www.apache.org/jbig2-imageio/)
* Apache JempBox (org.apache.pdfbox:jempbox:1.8.16 - http://www.apache.org/pdfbox-parent/jempbox/)
* Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.24 - https://www.apache.org/pdfbox-parent/pdfbox/)
* Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.19 - https://www.apache.org/pdfbox-parent/pdfbox-tools/)
* Apache Preflight (org.apache.pdfbox:preflight:2.0.19 - https://www.apache.org/pdfbox-parent/preflight/)
* Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.19 - https://www.apache.org/pdfbox-parent/xmpbox/)
* Apache POI (org.apache.poi:poi:3.17 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml:3.17 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml-schemas:3.17 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi-scratchpad:3.17 - http://poi.apache.org/)
* Apache SIS features (org.apache.sis.core:sis-feature:1.0 - http://sis.apache.org/core/sis-feature)
* Apache SIS metadata (org.apache.sis.core:sis-metadata:1.0 - http://sis.apache.org/core/sis-metadata)
* Apache SIS referencing (org.apache.sis.core:sis-referencing:1.0 - http://sis.apache.org/core/sis-referencing)
* Apache SIS utilities (org.apache.sis.core:sis-utility:1.0 - http://sis.apache.org/core/sis-utility)
* Apache SIS netCDF storage (org.apache.sis.storage:sis-netcdf:1.0 - http://sis.apache.org/storage/sis-netcdf)
* Apache SIS common storage (org.apache.sis.storage:sis-storage:1.0 - http://sis.apache.org/storage/sis-storage)
* Apache Solr Content Extraction Library (org.apache.solr:solr-cell:8.8.1 - https://lucene.apache.org/solr-parent/solr-cell)
* Apache Solr Core (org.apache.solr:solr-core:8.8.1 - https://lucene.apache.org/solr-parent/solr-core)
* Apache Solr Solrj (org.apache.solr:solr-solrj:8.8.1 - https://lucene.apache.org/solr-parent/solr-solrj)
* Apache PDFBox Debugger (org.apache.pdfbox:pdfbox-debugger:2.0.25 - https://www.apache.org/pdfbox-parent/pdfbox-debugger/)
* Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.25 - https://www.apache.org/pdfbox-parent/pdfbox-tools/)
* Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.25 - https://www.apache.org/pdfbox-parent/xmpbox/)
* Apache POI - Common (org.apache.poi:poi:5.2.0 - https://poi.apache.org/)
* Apache POI - API based on OPC and OOXML schemas (org.apache.poi:poi-ooxml:5.2.0 - https://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml-lite:5.2.0 - https://poi.apache.org/)
* Apache POI (org.apache.poi:poi-scratchpad:5.2.0 - https://poi.apache.org/)
* Apache Solr Core (org.apache.solr:solr-core:8.11.1 - https://lucene.apache.org/solr-parent/solr-core)
* Apache Solr Solrj (org.apache.solr:solr-solrj:8.11.1 - https://lucene.apache.org/solr-parent/solr-solrj)
* Apache Standard Taglib Implementation (org.apache.taglibs:taglibs-standard-impl:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-impl)
* Apache Standard Taglib Specification API (org.apache.taglibs:taglibs-standard-spec:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-spec)
* Apache Thrift (org.apache.thrift:libthrift:0.9.2 - http://thrift.apache.org)
* Apache Tika core (org.apache.tika:tika-core:1.24.1 - http://tika.apache.org/)
* Apache Tika Java-7 Components (org.apache.tika:tika-java7:1.24.1 - http://tika.apache.org/)
* Apache Tika parsers (org.apache.tika:tika-parsers:1.24.1 - http://tika.apache.org/)
* Apache Tika XMP (org.apache.tika:tika-xmp:1.24.1 - http://tika.apache.org/)
* tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.33 - https://tomcat.apache.org/)
* tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.33 - https://tomcat.apache.org/)
* tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.33 - https://tomcat.apache.org/)
* Apache Velocity - Engine (org.apache.velocity:velocity-engine-core:2.2 - http://velocity.apache.org/engine/devel/velocity-engine-core/)
* Apache Tika core (org.apache.tika:tika-core:2.3.0 - https://tika.apache.org/)
* Apache Tika Apple parser module (org.apache.tika:tika-parser-apple-module:2.3.0 - https://tika.apache.org/tika-parser-apple-module/)
* Apache Tika audiovideo parser module (org.apache.tika:tika-parser-audiovideo-module:2.3.0 - https://tika.apache.org/tika-parser-audiovideo-module/)
* Apache Tika cad parser module (org.apache.tika:tika-parser-cad-module:2.3.0 - https://tika.apache.org/tika-parser-cad-module/)
* Apache Tika code parser module (org.apache.tika:tika-parser-code-module:2.3.0 - https://tika.apache.org/tika-parser-code-module/)
* Apache Tika crypto parser module (org.apache.tika:tika-parser-crypto-module:2.3.0 - https://tika.apache.org/tika-parser-crypto-module/)
* Apache Tika digest commons (org.apache.tika:tika-parser-digest-commons:2.3.0 - https://tika.apache.org/tika-parser-digest-commons/)
* Apache Tika font parser module (org.apache.tika:tika-parser-font-module:2.3.0 - https://tika.apache.org/tika-parser-font-module/)
* Apache Tika html commons (org.apache.tika:tika-parser-html-commons:2.3.0 - https://tika.apache.org/tika-parser-html-commons/)
* Apache Tika html parser module (org.apache.tika:tika-parser-html-module:2.3.0 - https://tika.apache.org/tika-parser-html-module/)
* Apache Tika image parser module (org.apache.tika:tika-parser-image-module:2.3.0 - https://tika.apache.org/tika-parser-image-module/)
* Apache Tika mail commons (org.apache.tika:tika-parser-mail-commons:2.3.0 - https://tika.apache.org/tika-parser-mail-commons/)
* Apache Tika mail parser module (org.apache.tika:tika-parser-mail-module:2.3.0 - https://tika.apache.org/tika-parser-mail-module/)
* Apache Tika Microsoft parser module (org.apache.tika:tika-parser-microsoft-module:2.3.0 - https://tika.apache.org/tika-parser-microsoft-module/)
* Apache Tika miscellaneous office format parser module (org.apache.tika:tika-parser-miscoffice-module:2.3.0 - https://tika.apache.org/tika-parser-miscoffice-module/)
* Apache Tika news parser module (org.apache.tika:tika-parser-news-module:2.3.0 - https://tika.apache.org/tika-parser-news-module/)
* Apache Tika OCR parser module (org.apache.tika:tika-parser-ocr-module:2.3.0 - https://tika.apache.org/tika-parser-ocr-module/)
* Apache Tika PDF parser module (org.apache.tika:tika-parser-pdf-module:2.3.0 - https://tika.apache.org/tika-parser-pdf-module/)
* Apache Tika package parser module (org.apache.tika:tika-parser-pkg-module:2.3.0 - https://tika.apache.org/tika-parser-pkg-module/)
* Apache Tika text parser module (org.apache.tika:tika-parser-text-module:2.3.0 - https://tika.apache.org/tika-parser-text-module/)
* Apache Tika XML parser module (org.apache.tika:tika-parser-xml-module:2.3.0 - https://tika.apache.org/tika-parser-xml-module/)
* Apache Tika XMP commons (org.apache.tika:tika-parser-xmp-commons:2.3.0 - https://tika.apache.org/tika-parser-xmp-commons/)
* Apache Tika ZIP commons (org.apache.tika:tika-parser-zip-commons:2.3.0 - https://tika.apache.org/tika-parser-zip-commons/)
* Apache Tika standard parser package (org.apache.tika:tika-parsers-standard-package:2.3.0 - https://tika.apache.org/tika-parsers/tika-parsers-standard/tika-parsers-standard-package/)
* tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.63 - https://tomcat.apache.org/)
* tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.63 - https://tomcat.apache.org/)
* tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.63 - https://tomcat.apache.org/)
* Apache Velocity - Engine (org.apache.velocity:velocity-engine-core:2.3 - http://velocity.apache.org/engine/devel/velocity-engine-core/)
* Apache Velocity - JSR 223 Scripting (org.apache.velocity:velocity-engine-scripting:2.2 - http://velocity.apache.org/engine/devel/velocity-engine-scripting/)
* Axiom API (org.apache.ws.commons.axiom:axiom-api:1.2.22 - http://ws.apache.org/axiom/)
* LLOM (org.apache.ws.commons.axiom:axiom-impl:1.2.22 - http://ws.apache.org/axiom/implementations/axiom-impl/)
* Abdera Model (FOM) Implementation (org.apache.ws.commons.axiom:fom-impl:1.2.22 - http://ws.apache.org/axiom/implementations/fom-impl/)
* XmlSchema Core (org.apache.ws.xmlschema:xmlschema-core:2.2.5 - https://ws.apache.org/commons/xmlschema20/xmlschema-core/)
* XmlBeans (org.apache.xmlbeans:xmlbeans:3.1.0 - https://xmlbeans.apache.org/)
* zookeeper (org.apache.zookeeper:zookeeper:3.4.14 - no url defined)
* XmlBeans (org.apache.xmlbeans:xmlbeans:5.0.3 - https://xmlbeans.apache.org/)
* Apache ZooKeeper - Server (org.apache.zookeeper:zookeeper:3.6.2 - http://zookeeper.apache.org/zookeeper)
* Apache ZooKeeper - Jute (org.apache.zookeeper:zookeeper-jute:3.6.2 - http://zookeeper.apache.org/zookeeper-jute)
* AssertJ fluent assertions (org.assertj:assertj-core:3.13.2 - http://assertj.org/assertj-core)
* Evo Inflector (org.atteo:evo-inflector:1.2.2 - http://atteo.org/static/evo-inflector)
* org.apiguardian:apiguardian-api (org.apiguardian:apiguardian-api:1.1.0 - https://github.com/apiguardian-team/apiguardian)
* AssertJ fluent assertions (org.assertj:assertj-core:3.21.0 - https://assertj.github.io/doc/assertj-core/)
* Evo Inflector (org.atteo:evo-inflector:1.3 - http://atteo.org/static/evo-inflector)
* jose4j (org.bitbucket.b_c:jose4j:0.6.5 - https://bitbucket.org/b_c/jose4j/)
* TagSoup (org.ccil.cowan.tagsoup:tagsoup:1.2.1 - http://home.ccil.org/~cowan/XML/tagsoup/)
* Woodstox (org.codehaus.woodstox:woodstox-core-asl:4.4.1 - http://woodstox.codehaus.org)
* jems (org.dmfs:jems:1.18 - https://github.com/dmfs/jems)
* rfc3986-uri (org.dmfs:rfc3986-uri:0.8.1 - https://github.com/dmfs/uri-toolkit)
* Jetty :: Apache JSP Implementation (org.eclipse.jetty:apache-jsp:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/)
* Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client)
* Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client)
* Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server)
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
* Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client)
* Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client)
* Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server)
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
* Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-client)
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-continuation)
* Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-deploy)
* Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-http)
* Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-io)
* Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-jmx)
* Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-client)
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-continuation)
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-continuation)
* Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-deploy)
* Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-http)
* Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-io)
* Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-jmx)
* Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-rewrite)
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-security)
* Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-server)
* Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-servlet)
* Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-servlets)
* Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-util)
* Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-webapp)
* Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-xml)
* Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-client)
* Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-common)
* Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-hpack)
* Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport)
* Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-server)
* Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-rewrite)
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-security)
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-security)
* Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-server)
* Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlet)
* Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlets)
* Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util)
* Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util-ajax)
* Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-webapp)
* Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-xml)
* Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-client)
* Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-common)
* Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-hpack)
* Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport)
* Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-server)
* Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas)
* Ehcache (org.ehcache:ehcache:3.4.0 - http://ehcache.org)
* flyway-core (org.flywaydb:flyway-core:6.5.7 - https://flywaydb.org/flyway-core)
* flyway-core (org.flywaydb:flyway-core:8.4.4 - https://flywaydb.org/flyway-core)
* Ogg and Vorbis for Java, Core (org.gagravarr:vorbis-java-core:0.8 - https://github.com/Gagravarr/VorbisJava)
* Apache Tika plugin for Ogg, Vorbis and FLAC (org.gagravarr:vorbis-java-tika:0.8 - https://github.com/Gagravarr/VorbisJava)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.0.18.Final - http://hibernate.org/validator/hibernate-validator)
* Hibernate Validator Portable Extension (org.hibernate.validator:hibernate-validator-cdi:6.0.18.Final - http://hibernate.org/validator/hibernate-validator-cdi)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.0.23.Final - http://hibernate.org/validator/hibernate-validator)
* Hibernate Validator Portable Extension (org.hibernate.validator:hibernate-validator-cdi:6.0.23.Final - http://hibernate.org/validator/hibernate-validator-cdi)
* Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/)
* Java Annotation Indexer (org.jboss:jandex:2.1.1.Final - http://www.jboss.org/jandex)
* JBoss Logging 3 (org.jboss.logging:jboss-logging:3.3.2.Final - http://www.jboss.org)
* JDOM (org.jdom:jdom:1.1.3 - http://www.jdom.org)
* JDOM (org.jdom:jdom2:2.0.6 - http://www.jdom.org)
* Java Annotation Indexer (org.jboss:jandex:2.4.2.Final - http://www.jboss.org/jandex)
* JBoss Logging 3 (org.jboss.logging:jboss-logging:3.4.3.Final - http://www.jboss.org)
* JDOM (org.jdom:jdom2:2.0.6.1 - http://www.jdom.org)
* jtwig-core (org.jtwig:jtwig-core:5.87.0.RELEASE - http://jtwig.org)
* jtwig-reflection (org.jtwig:jtwig-reflection:5.87.0.RELEASE - http://jtwig.org)
* jtwig-spring (org.jtwig:jtwig-spring:5.87.0.RELEASE - http://jtwig.org)
@@ -341,69 +340,67 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Objenesis (org.objenesis:objenesis:3.2 - http://objenesis.org/objenesis)
* parboiled-core (org.parboiled:parboiled-core:1.3.1 - http://parboiled.org)
* parboiled-java (org.parboiled:parboiled-java:1.3.1 - http://parboiled.org)
* quartz (org.quartz-scheduler:quartz:2.3.2 - http://www.quartz-scheduler.org/quartz)
* rome-modules (org.rometools:rome-modules:1.0 - http://www.rometools.org)
* RRD4J (org.rrd4j:rrd4j:3.5 - https://github.com/rrd4j/rrd4j/)
* JSONassert (org.skyscreamer:jsonassert:1.5.0 - https://github.com/skyscreamer/JSONassert)
* Spring AOP (org.springframework:spring-aop:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework)
* Spring Beans (org.springframework:spring-beans:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework)
* Spring Context (org.springframework:spring-context:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework)
* Spring Context Support (org.springframework:spring-context-support:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework)
* Spring Core (org.springframework:spring-core:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework)
* Spring Expression Language (SpEL) (org.springframework:spring-expression:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework)
* Spring Commons Logging Bridge (org.springframework:spring-jcl:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework)
* Spring JDBC (org.springframework:spring-jdbc:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework)
* Spring Object/Relational Mapping (org.springframework:spring-orm:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework)
* Spring TestContext Framework (org.springframework:spring-test:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework)
* Spring Transaction (org.springframework:spring-tx:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework)
* Spring Web (org.springframework:spring-web:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework)
* Spring Web MVC (org.springframework:spring-webmvc:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework)
* Spring Boot (org.springframework.boot:spring-boot:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot)
* Spring Boot AutoConfigure (org.springframework.boot:spring-boot-autoconfigure:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-autoconfigure)
* Spring AOP (org.springframework:spring-aop:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Beans (org.springframework:spring-beans:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Context (org.springframework:spring-context:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Context Support (org.springframework:spring-context-support:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Core (org.springframework:spring-core:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Expression Language (SpEL) (org.springframework:spring-expression:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Commons Logging Bridge (org.springframework:spring-jcl:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring JDBC (org.springframework:spring-jdbc:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Object/Relational Mapping (org.springframework:spring-orm:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring TestContext Framework (org.springframework:spring-test:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Transaction (org.springframework:spring-tx:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Web (org.springframework:spring-web:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Web MVC (org.springframework:spring-webmvc:5.3.20 - https://github.com/spring-projects/spring-framework)
* spring-boot (org.springframework.boot:spring-boot:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-actuator (org.springframework.boot:spring-boot-actuator:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-actuator-autoconfigure (org.springframework.boot:spring-boot-actuator-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-autoconfigure (org.springframework.boot:spring-boot-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot)
* Spring Boot Configuration Processor (org.springframework.boot:spring-boot-configuration-processor:2.0.0.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-tools/spring-boot-configuration-processor)
* Spring Boot Starter (org.springframework.boot:spring-boot-starter:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter)
* Spring Boot AOP Starter (org.springframework.boot:spring-boot-starter-aop:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-aop)
* Spring Boot Cache Starter (org.springframework.boot:spring-boot-starter-cache:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-cache)
* Spring Boot Data REST Starter (org.springframework.boot:spring-boot-starter-data-rest:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-data-rest)
* Spring Boot Json Starter (org.springframework.boot:spring-boot-starter-json:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-json)
* Spring Boot Log4j 2 Starter (org.springframework.boot:spring-boot-starter-log4j2:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-log4j2)
* Spring Boot Security Starter (org.springframework.boot:spring-boot-starter-security:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-security)
* Spring Boot Test Starter (org.springframework.boot:spring-boot-starter-test:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-test)
* Spring Boot Tomcat Starter (org.springframework.boot:spring-boot-starter-tomcat:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-tomcat)
* Spring Boot Validation Starter (org.springframework.boot:spring-boot-starter-validation:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-validation)
* Spring Boot Web Starter (org.springframework.boot:spring-boot-starter-web:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-web)
* Spring Boot Test (org.springframework.boot:spring-boot-test:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-test)
* Spring Boot Test Auto-Configure (org.springframework.boot:spring-boot-test-autoconfigure:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-test-autoconfigure)
* Spring Data Core (org.springframework.data:spring-data-commons:2.2.6.RELEASE - https://www.spring.io/spring-data/spring-data-commons)
* Spring Data REST - Core (org.springframework.data:spring-data-rest-core:3.2.6.RELEASE - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-core)
* Spring Data REST - HAL Browser (org.springframework.data:spring-data-rest-hal-browser:3.2.6.RELEASE - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-hal-browser)
* Spring Data REST - WebMVC (org.springframework.data:spring-data-rest-webmvc:3.2.6.RELEASE - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-webmvc)
* Spring HATEOAS (org.springframework.hateoas:spring-hateoas:1.0.4.RELEASE - https://github.com/spring-projects/spring-hateoas)
* spring-boot-starter (org.springframework.boot:spring-boot-starter:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-starter-actuator (org.springframework.boot:spring-boot-starter-actuator:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-starter-aop (org.springframework.boot:spring-boot-starter-aop:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-starter-cache (org.springframework.boot:spring-boot-starter-cache:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-starter-data-rest (org.springframework.boot:spring-boot-starter-data-rest:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-starter-json (org.springframework.boot:spring-boot-starter-json:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-starter-log4j2 (org.springframework.boot:spring-boot-starter-log4j2:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-starter-security (org.springframework.boot:spring-boot-starter-security:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-starter-test (org.springframework.boot:spring-boot-starter-test:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-starter-tomcat (org.springframework.boot:spring-boot-starter-tomcat:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-starter-web (org.springframework.boot:spring-boot-starter-web:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-test (org.springframework.boot:spring-boot-test:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-test-autoconfigure (org.springframework.boot:spring-boot-test-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot)
* Spring Data Core (org.springframework.data:spring-data-commons:2.6.4 - https://www.spring.io/spring-data/spring-data-commons)
* Spring Data REST - Core (org.springframework.data:spring-data-rest-core:3.6.4 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-core)
* Spring Data REST - WebMVC (org.springframework.data:spring-data-rest-webmvc:3.6.4 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-webmvc)
* Spring HATEOAS (org.springframework.hateoas:spring-hateoas:1.4.2 - https://github.com/spring-projects/spring-hateoas)
* Spring Plugin - Core (org.springframework.plugin:spring-plugin-core:2.0.0.RELEASE - https://github.com/spring-projects/spring-plugin/spring-plugin-core)
* spring-security-config (org.springframework.security:spring-security-config:5.2.2.RELEASE - http://spring.io/spring-security)
* spring-security-core (org.springframework.security:spring-security-core:5.2.2.RELEASE - http://spring.io/spring-security)
* spring-security-test (org.springframework.security:spring-security-test:5.2.2.RELEASE - http://spring.io/spring-security)
* spring-security-web (org.springframework.security:spring-security-web:5.2.2.RELEASE - http://spring.io/spring-security)
* spring-security-config (org.springframework.security:spring-security-config:5.6.5 - https://spring.io/projects/spring-security)
* spring-security-core (org.springframework.security:spring-security-core:5.6.5 - https://spring.io/projects/spring-security)
* spring-security-crypto (org.springframework.security:spring-security-crypto:5.6.5 - https://spring.io/projects/spring-security)
* spring-security-test (org.springframework.security:spring-security-test:5.6.5 - https://spring.io/projects/spring-security)
* spring-security-web (org.springframework.security:spring-security-web:5.6.5 - https://spring.io/projects/spring-security)
* SWORD v2 :: Common Server Library (org.swordapp:sword2-server:1.0 - http://www.swordapp.org/)
* ISO Parser (org.tallison:isoparser:1.9.41.2 - https://github.com/tballison/mp4parser)
* org.tallison:metadata-extractor (org.tallison:metadata-extractor:2.13.0 - https://drewnoakes.com/code/exif/)
* XMPCore Shaded (org.tallison.xmp:xmpcore-shaded:6.1.10 - https://github.com/tballison)
* snappy-java (org.xerial.snappy:snappy-java:1.1.7.6 - https://github.com/xerial/snappy-java)
* xml-matchers (org.xmlmatchers:xml-matchers:0.10 - http://code.google.com/p/xml-matchers/)
* org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.6.4 - https://www.xmlunit.org/)
* org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.0 - https://www.xmlunit.org/)
* org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.4 - https://www.xmlunit.org/)
* org.xmlunit:xmlunit-placeholders (org.xmlunit:xmlunit-placeholders:2.8.0 - https://www.xmlunit.org/xmlunit-placeholders/)
* SnakeYAML (org.yaml:snakeyaml:1.25 - http://www.snakeyaml.org)
* SnakeYAML (org.yaml:snakeyaml:1.26 - http://www.snakeyaml.org)
* ROME, RSS and atOM utilitiEs for Java (rome:rome:1.0 - https://rome.dev.java.net/)
* SnakeYAML (org.yaml:snakeyaml:1.29 - http://www.snakeyaml.org)
* software.amazon.ion:ion-java (software.amazon.ion:ion-java:1.0.2 - https://github.com/amznlabs/ion-java/)
* Xalan Java Serializer (xalan:serializer:2.7.2 - http://xml.apache.org/xalan-j/)
* xalan (xalan:xalan:2.7.0 - no url defined)
* Xerces2-j (xerces:xercesImpl:2.12.0 - https://xerces.apache.org/xerces2-j/)
* Xalan Java (xalan:xalan:2.7.2 - http://xml.apache.org/xalan-j/)
* Xerces2-j (xerces:xercesImpl:2.12.2 - https://xerces.apache.org/xerces2-j/)
* XML Commons External Components XML APIs (xml-apis:xml-apis:1.4.01 - http://xml.apache.org/commons/components/external/)
BSD License:
* AntLR Parser Generator (antlr:antlr:2.7.7 - http://www.antlr.org/)
* Adobe XMPCore (com.adobe.xmp:xmpcore:6.1.11 - https://www.adobe.com/devnet/xmp/library/eula-xmp-library-java.html)
* coverity-escapers (com.coverity.security:coverity-escapers:1.1.1 - http://coverity.com/security)
* Java Advanced Imaging Image I/O Tools API core (standalone) (com.github.jai-imageio:jai-imageio-core:1.4.0 - https://github.com/jai-imageio/jai-imageio-core)
* JSONLD Java :: Core (com.github.jsonld-java:jsonld-java:0.5.1 - http://github.com/jsonld-java/jsonld-java/jsonld-java/)
@@ -411,41 +408,36 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Protocol Buffers [Core] (com.google.protobuf:protobuf-java:3.11.0 - https://developers.google.com/protocol-buffers/protobuf-java/)
* JZlib (com.jcraft:jzlib:1.1.3 - http://www.jcraft.com/jzlib/)
* dnsjava (dnsjava:dnsjava:2.1.7 - http://www.dnsjava.org)
* Units of Measurement API (javax.measure:unit-api:1.0 - http://unitsofmeasurement.github.io/)
* jaxen (jaxen:jaxen:1.1.6 - http://jaxen.codehaus.org/)
* JLine (jline:jline:0.9.94 - http://jline.sourceforge.net)
* ANTLR 4 Runtime (org.antlr:antlr4-runtime:4.5.1-1 - http://www.antlr.org/antlr4-runtime)
* commons-compiler (org.codehaus.janino:commons-compiler:3.0.9 - http://janino-compiler.github.io/commons-compiler/)
* janino (org.codehaus.janino:janino:3.0.9 - http://janino-compiler.github.io/janino/)
* Stax2 API (org.codehaus.woodstox:stax2-api:3.1.4 - http://wiki.fasterxml.com/WoodstoxStax2)
* dom4j (org.dom4j:dom4j:2.1.1 - http://dom4j.github.io/)
* Stax2 API (org.codehaus.woodstox:stax2-api:4.2.1 - http://github.com/FasterXML/stax2-api)
* Hamcrest Date (org.exparity:hamcrest-date:2.0.7 - https://github.com/exparity/hamcrest-date)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Hamcrest (org.hamcrest:hamcrest:2.1 - http://hamcrest.org/JavaHamcrest/)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Hamcrest (org.hamcrest:hamcrest:2.2 - http://hamcrest.org/JavaHamcrest/)
* Hamcrest All (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all)
* Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core)
* Hamcrest library (org.hamcrest:hamcrest-library:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-library)
* JBibTeX (org.jbibtex:jbibtex:1.0.10 - http://www.jbibtex.org)
* HdrHistogram (org.hdrhistogram:HdrHistogram:2.1.12 - http://hdrhistogram.github.io/HdrHistogram/)
* JBibTeX (org.jbibtex:jbibtex:1.0.20 - http://www.jbibtex.org)
* asm (org.ow2.asm:asm:8.0.1 - http://asm.ow2.io/)
* asm-analysis (org.ow2.asm:asm-analysis:7.1 - http://asm.ow2.org/)
* asm-commons (org.ow2.asm:asm-commons:8.0.1 - http://asm.ow2.io/)
* asm-tree (org.ow2.asm:asm-tree:7.1 - http://asm.ow2.org/)
* asm-util (org.ow2.asm:asm-util:7.1 - http://asm.ow2.org/)
* PostgreSQL JDBC Driver (org.postgresql:postgresql:42.2.25 - https://jdbc.postgresql.org)
* PostgreSQL JDBC Driver (org.postgresql:postgresql:42.4.1 - https://jdbc.postgresql.org)
* Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections)
* JMatIO (org.tallison:jmatio:1.5 - https://github.com/tballison/jmatio)
* XMLUnit for Java (xmlunit:xmlunit:1.3 - http://xmlunit.sourceforge.net/)
Common Development and Distribution License (CDDL):
* JavaBeans Activation Framework (com.sun.activation:javax.activation:1.2.0 - http://java.net/all/javax.activation/)
* istack common utility code runtime (com.sun.istack:istack-commons-runtime:3.0.7 - http://java.net/istack-commons/istack-commons-runtime/)
* JavaMail API (com.sun.mail:javax.mail:1.6.2 - http://javaee.github.io/javamail/javax.mail)
* JavaMail API (no providers) (com.sun.mail:mailapi:1.6.2 - http://javaee.github.io/javamail/mailapi)
* Old JAXB Core (com.sun.xml.bind:jaxb-core:2.3.0.1 - http://jaxb.java.net/jaxb-bundles/jaxb-core)
* Old JAXB Runtime (com.sun.xml.bind:jaxb-impl:2.3.1 - http://jaxb.java.net/jaxb-bundles/jaxb-impl)
* saaj-impl (com.sun.xml.messaging.saaj:saaj-impl:1.4.0-b03 - http://java.net/saaj-impl/)
* Jakarta Annotations API (jakarta.annotation:jakarta.annotation-api:1.3.5 - https://projects.eclipse.org/projects/ee4j.ca)
* jakarta.ws.rs-api (jakarta.ws.rs:jakarta.ws.rs-api:2.1.6 - https://github.com/eclipse-ee4j/jaxrs-api)
* JavaBeans Activation Framework (JAF) (javax.activation:activation:1.1 - http://java.sun.com/products/javabeans/jaf/index.jsp)
@@ -454,7 +446,6 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Java Servlet API (javax.servlet:javax.servlet-api:3.1.0 - http://servlet-spec.java.net)
* javax.transaction API (javax.transaction:javax.transaction-api:1.3 - http://jta-spec.java.net)
* jaxb-api (javax.xml.bind:jaxb-api:2.3.1 - https://github.com/javaee/jaxb-spec/jaxb-api)
* JAX-WS API (javax.xml.ws:jaxws-api:2.3.1 - https://github.com/javaee/jax-ws-spec)
* JHighlight (org.codelibs:jhighlight:1.0.3 - https://github.com/codelibs/jhighlight)
* HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api)
* ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator)
@@ -464,10 +455,9 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* javax.inject:1 as OSGi bundle (org.glassfish.hk2.external:jakarta.inject:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/jakarta.inject)
* JAXB Runtime (org.glassfish.jaxb:jaxb-runtime:2.3.1 - http://jaxb.java.net/jaxb-runtime-parent/jaxb-runtime)
* TXW2 Runtime (org.glassfish.jaxb:txw2:2.3.1 - http://jaxb.java.net/jaxb-txw-parent/txw2)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Java Transaction API (org.jboss.spec.javax.transaction:jboss-transaction-api_1.2_spec:1.1.1.Final - http://www.jboss.org/jboss-transaction-api_1.2_spec)
* MIME streaming extension (org.jvnet.mimepull:mimepull:1.9.7 - http://mimepull.java.net)
* Extended StAX API (org.jvnet.staxex:stax-ex:1.8 - http://stax-ex.java.net/)
Cordra (Version 2) License Agreement:
@@ -478,56 +468,55 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
Eclipse Distribution License, Version 1.0:
* JavaBeans Activation Framework (com.sun.activation:jakarta.activation:1.2.1 - https://github.com/eclipse-ee4j/jaf/jakarta.activation)
* JavaBeans Activation Framework API jar (jakarta.activation:jakarta.activation-api:1.2.1 - https://github.com/eclipse-ee4j/jaf/jakarta.activation-api)
* Jakarta Activation API jar (jakarta.activation:jakarta.activation-api:1.2.2 - https://github.com/eclipse-ee4j/jaf/jakarta.activation-api)
* jakarta.xml.bind-api (jakarta.xml.bind:jakarta.xml.bind-api:2.3.2 - https://github.com/eclipse-ee4j/jaxb-api/jakarta.xml.bind-api)
* Jakarta XML Binding API (jakarta.xml.bind:jakarta.xml.bind-api:2.3.3 - https://github.com/eclipse-ee4j/jaxb-api/jakarta.xml.bind-api)
* javax.persistence-api (javax.persistence:javax.persistence-api:2.2 - https://github.com/javaee/jpa-spec)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Java Persistence API, Version 2.1 (org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.0.Final - http://hibernate.org)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Java Persistence API, Version 2.1 (org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.2.Final - http://hibernate.org)
Eclipse Public License:
* System Rules (com.github.stefanbirkner:system-rules:1.19.0 - http://stefanbirkner.github.io/system-rules/)
* c3p0 (com.mchange:c3p0:0.9.5.5 - https://github.com/swaldman/c3p0)
* mchange-commons-java (com.mchange:mchange-commons-java:0.2.19 - https://github.com/swaldman/mchange-commons-java)
* H2 Database Engine (com.h2database:h2:2.1.210 - https://h2database.com)
* Jakarta Annotations API (jakarta.annotation:jakarta.annotation-api:1.3.5 - https://projects.eclipse.org/projects/ee4j.ca)
* jakarta.ws.rs-api (jakarta.ws.rs:jakarta.ws.rs-api:2.1.6 - https://github.com/eclipse-ee4j/jaxrs-api)
* javax.persistence-api (javax.persistence:javax.persistence-api:2.2 - https://github.com/javaee/jpa-spec)
* JUnit (junit:junit:4.13.1 - http://junit.org)
* AspectJ runtime (org.aspectj:aspectjrt:1.8.0 - http://www.aspectj.org)
* AspectJ weaver (org.aspectj:aspectjweaver:1.9.5 - http://www.aspectj.org)
* AspectJ Weaver (org.aspectj:aspectjweaver:1.9.7 - https://www.eclipse.org/aspectj/)
* Eclipse Compiler for Java(TM) (org.eclipse.jdt:ecj:3.14.0 - http://www.eclipse.org/jdt)
* Jetty :: Apache JSP Implementation (org.eclipse.jetty:apache-jsp:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/)
* Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client)
* Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client)
* Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server)
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
* Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client)
* Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client)
* Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server)
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
* Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-client)
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-continuation)
* Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-deploy)
* Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-http)
* Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-io)
* Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-jmx)
* Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-client)
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-continuation)
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-continuation)
* Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-deploy)
* Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-http)
* Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-io)
* Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-jmx)
* Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-rewrite)
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-security)
* Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-server)
* Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-servlet)
* Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-servlets)
* Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-util)
* Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-webapp)
* Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-xml)
* Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-client)
* Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-common)
* Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-hpack)
* Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport)
* Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-server)
* Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-rewrite)
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-security)
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-security)
* Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-server)
* Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlet)
* Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlets)
* Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util)
* Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util-ajax)
* Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-webapp)
* Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-xml)
* Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-client)
* Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-common)
* Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-hpack)
* Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport)
* Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-server)
* Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas)
* HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api)
* ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator)
@@ -535,10 +524,10 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* OSGi resource locator (org.glassfish.hk2:osgi-resource-locator:1.0.3 - https://projects.eclipse.org/projects/ee4j/osgi-resource-locator)
* aopalliance version 1.0 repackaged as a module (org.glassfish.hk2.external:aopalliance-repackaged:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/aopalliance-repackaged)
* javax.inject:1 as OSGi bundle (org.glassfish.hk2.external:jakarta.inject:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/jakarta.inject)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Java Persistence API, Version 2.1 (org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.0.Final - http://hibernate.org)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Java Persistence API, Version 2.1 (org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.2.Final - http://hibernate.org)
* Jetty Server (org.mortbay.jetty:jetty:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/modules/jetty)
* Jetty Servlet Tester (org.mortbay.jetty:jetty-servlet-tester:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-servlet-tester)
* Jetty Utilities (org.mortbay.jetty:jetty-util:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-util)
@@ -552,21 +541,16 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* json-schema-validator (com.github.java-json-tools:json-schema-validator:2.2.14 - https://github.com/java-json-tools/json-schema-validator)
* msg-simple (com.github.java-json-tools:msg-simple:1.2 - https://github.com/java-json-tools/msg-simple)
* uri-template (com.github.java-json-tools:uri-template:0.10 - https://github.com/java-json-tools/uri-template)
* SpotBugs Annotations (com.github.spotbugs:spotbugs-annotations:3.1.9 - https://spotbugs.github.io/)
* FindBugs-Annotations (com.google.code.findbugs:annotations:3.0.1u2 - http://findbugs.sourceforge.net/)
* c3p0 (com.mchange:c3p0:0.9.5.5 - https://github.com/swaldman/c3p0)
* mchange-commons-java (com.mchange:mchange-commons-java:0.2.19 - https://github.com/swaldman/mchange-commons-java)
* Java Native Access (net.java.dev.jna:jna:5.5.0 - https://github.com/java-native-access/jna)
* JHighlight (org.codelibs:jhighlight:1.0.3 - https://github.com/codelibs/jhighlight)
* Hibernate ORM - hibernate-core (org.hibernate:hibernate-core:5.4.10.Final - http://hibernate.org/orm)
* Hibernate ORM - hibernate-ehcache (org.hibernate:hibernate-ehcache:5.4.10.Final - http://hibernate.org/orm)
* Hibernate ORM - hibernate-jpamodelgen (org.hibernate:hibernate-jpamodelgen:5.4.10.Final - http://hibernate.org/orm)
* Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:5.1.0.Final - http://hibernate.org)
* Hibernate ORM - hibernate-core (org.hibernate:hibernate-core:5.6.5.Final - https://hibernate.org/orm)
* Hibernate ORM - hibernate-jcache (org.hibernate:hibernate-jcache:5.6.5.Final - https://hibernate.org/orm)
* Hibernate ORM - hibernate-jpamodelgen (org.hibernate:hibernate-jpamodelgen:5.6.5.Final - https://hibernate.org/orm)
* Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:5.1.2.Final - http://hibernate.org)
* im4java (org.im4java:im4java:1.4.0 - http://sourceforge.net/projects/im4java/)
* JacORB OMG-API (org.jacorb:jacorb-omgapi:3.9 - http://www.jacorb.org)
* Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/)
* Java RMI API (org.jboss.spec.javax.rmi:jboss-rmi-api_1.0_spec:1.0.6.Final - http://www.jboss.org/jboss-rmi-api_1.0_spec)
* XOM (xom:xom:1.2.5 - http://xom.nu)
* XOM (xom:xom:1.3.7 - https://xom.nu)
Go License:
@@ -576,29 +560,21 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Handle Server (net.handle:handle:9.3.0 - https://www.handle.net)
JDOM License (Apache-style license):
* jdom (jdom:jdom:1.0 - no url defined)
MIT License:
* Java SemVer (com.github.zafarkhaja:java-semver:0.9.0 - https://github.com/zafarkhaja/jsemver)
* dd-plist (com.googlecode.plist:dd-plist:1.23 - http://www.github.com/3breadt/dd-plist)
* DigitalCollections: IIIF API Library (de.digitalcollections.iiif:iiif-apis:0.3.9 - https://github.com/dbmdz/iiif-apis)
* CDM core library (edu.ucar:cdm:4.5.5 - http://www.unidata.ucar.edu/software/netcdf-java/documentation.htm)
* GRIB IOSP and Feature Collection (edu.ucar:grib:4.5.5 - http://www.unidata.ucar.edu/software/netcdf-java/)
* HttpClient Wrappers (edu.ucar:httpservices:4.5.5 - http://www.unidata.ucar.edu/software/netcdf-java/documentation.htm)
* netCDF-4 IOSP JNI connection to C library (edu.ucar:netcdf4:4.5.5 - http://www.unidata.ucar.edu/software/netcdf-java/netcdf4/)
* udunits (edu.ucar:udunits:4.5.5 - http://www.unidata.ucar.edu/software/udunits//)
* JOpt Simple (net.sf.jopt-simple:jopt-simple:5.0.4 - http://jopt-simple.github.io/jopt-simple)
* Bouncy Castle S/MIME API (org.bouncycastle:bcmail-jdk15on:1.65 - http://www.bouncycastle.org/java.html)
* Bouncy Castle PKIX, CMS, EAC, TSP, PKCS, OCSP, CMP, and CRMF APIs (org.bouncycastle:bcpkix-jdk15on:1.65 - http://www.bouncycastle.org/java.html)
* Bouncy Castle Provider (org.bouncycastle:bcprov-jdk15on:1.65 - http://www.bouncycastle.org/java.html)
* Bouncy Castle S/MIME API (org.bouncycastle:bcmail-jdk15on:1.70 - https://www.bouncycastle.org/java.html)
* Bouncy Castle PKIX, CMS, EAC, TSP, PKCS, OCSP, CMP, and CRMF APIs (org.bouncycastle:bcpkix-jdk15on:1.70 - https://www.bouncycastle.org/java.html)
* Bouncy Castle Provider (org.bouncycastle:bcprov-jdk15on:1.70 - https://www.bouncycastle.org/java.html)
* Bouncy Castle ASN.1 Extension and Utility APIs (org.bouncycastle:bcutil-jdk15on:1.70 - https://www.bouncycastle.org/java.html)
* org.brotli:dec (org.brotli:dec:0.1.2 - http://brotli.org/dec)
* Checker Qual (org.checkerframework:checker-qual:3.10.0 - https://checkerframework.org)
* Checker Qual (org.checkerframework:checker-qual:3.5.0 - https://checkerframework.org)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Itadaki jbzip2 (org.itadaki:bzip2:0.9.1 - https://code.google.com/p/jbzip2/)
* jsoup Java HTML Parser (org.jsoup:jsoup:1.13.1 - https://jsoup.org/)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* mockito-core (org.mockito:mockito-core:3.12.4 - https://github.com/mockito/mockito)
* mockito-inline (org.mockito:mockito-inline:3.12.4 - https://github.com/mockito/mockito)
* ORCID - Model (org.orcid:orcid-model:3.0.2 - http://github.com/ORCID/orcid-model)
@@ -606,29 +582,33 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* JUL to SLF4J bridge (org.slf4j:jul-to-slf4j:1.7.25 - http://www.slf4j.org)
* SLF4J API Module (org.slf4j:slf4j-api:1.7.25 - http://www.slf4j.org)
* SLF4J Extensions Module (org.slf4j:slf4j-ext:1.7.28 - http://www.slf4j.org)
* HAL Browser (org.webjars:hal-browser:ad9b865 - http://webjars.org)
* toastr (org.webjars.bowergithub.codeseven:toastr:2.1.4 - http://webjars.org)
* jquery (org.webjars.bowergithub.jquery:jquery-dist:3.5.1 - https://www.webjars.org)
* bootstrap (org.webjars.bowergithub.twbs:bootstrap:4.5.2 - https://www.webjars.org)
* backbone (org.webjars.bowergithub.jashkenas:backbone:1.4.1 - https://www.webjars.org)
* underscore (org.webjars.bowergithub.jashkenas:underscore:1.13.2 - https://www.webjars.org)
* jquery (org.webjars.bowergithub.jquery:jquery-dist:3.6.0 - https://www.webjars.org)
* urijs (org.webjars.bowergithub.medialize:uri.js:1.19.10 - https://www.webjars.org)
* bootstrap (org.webjars.bowergithub.twbs:bootstrap:4.6.1 - https://www.webjars.org)
* core-js (org.webjars.npm:core-js:3.25.2 - https://www.webjars.org)
* @json-editor/json-editor (org.webjars.npm:json-editor__json-editor:2.6.1 - https://www.webjars.org)
Mozilla Public License:
* juniversalchardet (com.googlecode.juniversalchardet:juniversalchardet:1.0.3 - http://juniversalchardet.googlecode.com/)
* h2 (com.h2database:h2:1.4.187 - no url defined)
* H2 Database Engine (com.h2database:h2:2.1.210 - https://h2database.com)
* Saxon-HE (net.sf.saxon:Saxon-HE:9.8.0-14 - http://www.saxonica.com/)
* Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/)
* Mozilla Rhino (org.mozilla:rhino:1.7.7.2 - https://developer.mozilla.org/en/Rhino)
OGC copyright:
* GeoAPI (org.opengis:geoapi:3.0.1 - http://www.geoapi.org/geoapi/)
Public Domain:
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* HdrHistogram (org.hdrhistogram:HdrHistogram:2.1.12 - http://hdrhistogram.github.io/HdrHistogram/)
* LatencyUtils (org.latencyutils:LatencyUtils:2.0.3 - http://latencyutils.github.io/LatencyUtils/)
* Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections)
* XZ for Java (org.tukaani:xz:1.8 - https://tukaani.org/xz/java.html)
* XZ for Java (org.tukaani:xz:1.9 - https://tukaani.org/xz/java.html)
The JSON License:
@@ -636,7 +616,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
UnRar License:
* Java UnRar (com.github.junrar:junrar:4.0.0 - https://github.com/junrar/junrar)
* Java Unrar (com.github.junrar:junrar:7.4.1 - https://github.com/junrar/junrar)
Unicode/ICU License:
@@ -644,10 +624,10 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
W3C license:
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
jQuery license:
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)

View File

@@ -35,7 +35,7 @@ Documentation for each release may be viewed online or downloaded via our [Docum
The latest DSpace Installation instructions are available at:
https://wiki.lyrasis.org/display/DSDOC7x/Installing+DSpace
Please be aware that, as a Java web application, DSpace requires a database (PostgreSQL or Oracle)
Please be aware that, as a Java web application, DSpace requires a database (PostgreSQL)
and a servlet container (usually Tomcat) in order to function.
More information about these and all other prerequisites can be found in the Installation instructions above.
@@ -48,18 +48,7 @@ See [Running DSpace 7 with Docker Compose](dspace/src/main/docker-compose/README
## Contributing
DSpace is a community built and supported project. We do not have a centralized development or support team,
but have a dedicated group of volunteers who help us improve the software, documentation, resources, etc.
We welcome contributions of any type. Here's a few basic guides that provide suggestions for contributing to DSpace:
* [How to Contribute to DSpace](https://wiki.lyrasis.org/display/DSPACE/How+to+Contribute+to+DSpace): How to contribute in general (via code, documentation, bug reports, expertise, etc)
* [Code Contribution Guidelines](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines): How to give back code or contribute features, bug fixes, etc.
* [DSpace Community Advisory Team (DCAT)](https://wiki.lyrasis.org/display/cmtygp/DSpace+Community+Advisory+Team): If you are not a developer, we also have an interest group specifically for repository managers. The DCAT group meets virtually, once a month, and sends open invitations to join their meetings via the [DCAT mailing list](https://groups.google.com/d/forum/DSpaceCommunityAdvisoryTeam).
We also encourage GitHub Pull Requests (PRs) at any time. Please see our [Development with Git](https://wiki.lyrasis.org/display/DSPACE/Development+with+Git) guide for more info.
In addition, a listing of all known contributors to DSpace software can be
found online at: https://wiki.lyrasis.org/display/DSPACE/DSpaceContributors
See [Contributing documentation](CONTRIBUTING.md)
## Getting Help

View File

@@ -92,9 +92,7 @@ For more information on CheckStyle configurations below, see: http://checkstyle.
<!-- Requirements for Javadocs for methods -->
<module name="JavadocMethod">
<!-- All public methods MUST HAVE Javadocs -->
<!-- <property name="scope" value="public"/> -->
<!-- TODO: Above rule has been disabled because of large amount of missing public method Javadocs -->
<property name="scope" value="nothing"/>
<property name="scope" value="public"/>
<!-- Allow params, throws and return tags to be optional -->
<property name="allowMissingParamTags" value="true"/>
<property name="allowMissingReturnTag" value="true"/>

View File

@@ -41,6 +41,8 @@ services:
target: 8080
- published: 8009
target: 8009
- published: 8000
target: 8000
stdin_open: true
tty: true
volumes:

View File

@@ -12,7 +12,7 @@
<parent>
<groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId>
<version>7.2</version>
<version>7.5-SNAPSHOT</version>
<relativePath>..</relativePath>
</parent>
@@ -334,18 +334,47 @@
</profiles>
<dependencies>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-ehcache</artifactId>
<artifactId>hibernate-core</artifactId>
<exclusions>
<!-- Newer version pulled in via Jersey below -->
<exclusion>
<groupId>org.javassist</groupId>
<artifactId>javassist</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-jcache</artifactId>
</dependency>
<dependency>
<groupId>org.ehcache</groupId>
<artifactId>ehcache</artifactId>
<version>${ehcache.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-cache
Caching dependencies for sherpa service. -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-cache</artifactId>
<version>${spring-boot.version}</version>
<exclusions>
<!-- Newer version pulled in via Jersey below -->
<exclusion>
<groupId>org.javassist</groupId>
<artifactId>javassist</artifactId>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>javax.cache</groupId>
<artifactId>cache-api</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-jpamodelgen</artifactId>
@@ -358,7 +387,7 @@
<dependency>
<groupId>org.hibernate.javax.persistence</groupId>
<artifactId>hibernate-jpa-2.1-api</artifactId>
<version>1.0.0.Final</version>
<version>1.0.2.Final</version>
</dependency>
<dependency>
@@ -379,7 +408,7 @@
<groupId>org.ow2.asm</groupId>
<artifactId>asm-commons</artifactId>
</exclusion>
<!-- Newer version of Bouncycastle brought in via solr-cell -->
<!-- Newer version of Bouncycastle brought in via Tika -->
<exclusion>
<groupId>org.bouncycastle</groupId>
<artifactId>bcpkix-jdk15on</artifactId>
@@ -388,39 +417,6 @@
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15on</artifactId>
</exclusion>
<!-- Newer version of Jetty in our parent POM & via Solr -->
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-alpn-java-server</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-deploy</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlet</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlets</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-webapp</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-xml</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty.http2</groupId>
<artifactId>http2-common</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty.http2</groupId>
<artifactId>http2-server</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Jetty is needed to run Handle Server -->
@@ -505,7 +501,7 @@
</dependency>
<dependency>
<groupId>org.jdom</groupId>
<artifactId>jdom</artifactId>
<artifactId>jdom2</artifactId>
</dependency>
<dependency>
<groupId>org.apache.pdfbox</groupId>
@@ -515,22 +511,11 @@
<groupId>org.apache.pdfbox</groupId>
<artifactId>fontbox</artifactId>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-scratchpad</artifactId>
</dependency>
<dependency>
<groupId>xalan</groupId>
<artifactId>xalan</artifactId>
</dependency>
<dependency>
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
</dependency>
<dependency>
<groupId>com.ibm.icu</groupId>
<artifactId>icu4j</artifactId>
</dependency>
<!-- Codebase at https://github.com/DSpace/oclc-harvester2 -->
<dependency>
<groupId>org.dspace</groupId>
<artifactId>oclc-harvester2</artifactId>
@@ -566,101 +551,49 @@
</dependency>
<!-- Used for RSS / ATOM syndication feeds -->
<dependency>
<groupId>org.rometools</groupId>
<groupId>com.rometools</groupId>
<artifactId>rome</artifactId>
</dependency>
<dependency>
<groupId>com.rometools</groupId>
<artifactId>rome-modules</artifactId>
<version>1.0</version>
</dependency>
<dependency>
<groupId>org.jbibtex</groupId>
<artifactId>jbibtex</artifactId>
<version>1.0.10</version>
<version>1.0.20</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpmime</artifactId>
</dependency>
<!-- SolrJ is used to communicate with Solr throughout the dspace-api -->
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-solrj</artifactId>
<version>${solr.client.version}</version>
<exclusions>
<!-- Newer Jetty version brought in via Parent POM -->
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-io</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Solr Core is needed for Integration Tests (to run a MockSolrServer) -->
<!-- Solr Core is only needed for Integration Tests (to run a MockSolrServer) -->
<!-- The following Solr / Lucene dependencies also support integration tests -->
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-core</artifactId>
<scope>test</scope>
<version>${solr.client.version}</version>
<exclusions>
<!-- Newer version brought in by opencsv -->
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
</exclusion>
<!-- Newer Jetty version brought in via Parent POM -->
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-io</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-cell</artifactId>
<exclusions>
<!-- Newer version brought in by opencsv -->
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
</exclusion>
<!-- Newer Jetty version brought in via Parent POM -->
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-io</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>
</dependency>
<!-- Used for full-text indexing with Solr -->
<dependency>
<groupId>org.apache.tika</groupId>
<artifactId>tika-parsers</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-icu</artifactId>
@@ -676,9 +609,15 @@
<artifactId>lucene-analyzers-stempel</artifactId>
<scope>test</scope>
</dependency>
<!-- Tika is used to extract full text from documents in order to index in Solr -->
<dependency>
<groupId>org.apache.xmlbeans</groupId>
<artifactId>xmlbeans</artifactId>
<groupId>org.apache.tika</groupId>
<artifactId>tika-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.tika</groupId>
<artifactId>tika-parsers-standard-package</artifactId>
</dependency>
<dependency>
@@ -702,13 +641,6 @@
<version>1.1.1</version>
</dependency>
<!-- Gson: Java to Json conversion -->
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
@@ -736,7 +668,7 @@
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId>
<version>6.5.7</version>
<version>8.4.4</version>
</dependency>
<!-- Google Analytics -->
@@ -792,44 +724,6 @@
<artifactId>jaxb-runtime</artifactId>
</dependency>
<!-- Apache Axiom -->
<dependency>
<groupId>org.apache.ws.commons.axiom</groupId>
<artifactId>axiom-impl</artifactId>
<version>${axiom.version}</version>
<exclusions>
<!-- Exclude Geronimo as it is NOT necessary when using javax.activation (which we use)
See: https://ws.apache.org/axiom/userguide/ch04.html#d0e732 -->
<exclusion>
<groupId>org.apache.geronimo.specs</groupId>
<artifactId>*</artifactId>
</exclusion>
<!-- Exclude Woodstox, as later version provided by Solr dependencies -->
<exclusion>
<groupId>org.codehaus.woodstox</groupId>
<artifactId>woodstox-core-asl</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.ws.commons.axiom</groupId>
<artifactId>axiom-api</artifactId>
<version>${axiom.version}</version>
<exclusions>
<!-- Exclude Geronimo as it is NOT necessary when using javax.activation (which we use)
See: https://ws.apache.org/axiom/userguide/ch04.html#d0e732 -->
<exclusion>
<groupId>org.apache.geronimo.specs</groupId>
<artifactId>*</artifactId>
</exclusion>
<!-- Exclude Woodstox, as later version provided by Solr dependencies -->
<exclusion>
<groupId>org.codehaus.woodstox</groupId>
<artifactId>woodstox-core-asl</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Jersey / JAX-RS client (javax.ws.rs.*) dependencies needed to integrate with external sources/services -->
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
@@ -848,7 +742,7 @@
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-s3</artifactId>
<version>1.12.116</version>
<version>1.12.261</version>
</dependency>
<dependency>
@@ -885,18 +779,25 @@
<version>20180130</version>
</dependency>
<!-- Useful for testing command-line tools -->
<dependency>
<groupId>com.github.stefanbirkner</groupId>
<artifactId>system-rules</artifactId>
<version>1.19.0</version>
<scope>test</scope>
</dependency>
<!-- Used for Solr core export/import -->
<dependency>
<groupId>com.opencsv</groupId>
<artifactId>opencsv</artifactId>
<version>5.2</version>
<version>5.6</version>
</dependency>
<!-- Email templating -->
<dependency>
<groupId>org.apache.velocity</groupId>
<artifactId>velocity-engine-core</artifactId>
<type>jar</type>
</dependency>
<dependency>
@@ -918,18 +819,18 @@
<version>2.0.0</version>
</dependency>
<dependency>
<groupId>com.github.stefanbirkner</groupId>
<artifactId>system-rules</artifactId>
<version>1.19.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mock-server</groupId>
<artifactId>mockserver-junit-rule</artifactId>
<version>5.11.2</version>
<scope>test</scope>
<exclusions>
<!-- Exclude snakeyaml to avoid conflicts with: spring-boot-starter-cache -->
<exclusion>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
@@ -938,11 +839,6 @@
<!-- for mockserver -->
<!-- Solve dependency convergence issues related to
'mockserver-junit-rule' by selecting the versions we want to use. -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
<version>1.9</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-buffer</artifactId>
@@ -971,7 +867,7 @@
<dependency>
<groupId>org.apache.velocity</groupId>
<artifactId>velocity-engine-core</artifactId>
<version>2.2</version>
<version>2.3</version>
</dependency>
<dependency>
<groupId>org.xmlunit</groupId>

View File

@@ -0,0 +1,30 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status;
import java.sql.SQLException;
import java.util.Date;
import org.dspace.content.Item;
import org.dspace.core.Context;
/**
 * Plugin interface for the access status calculation.
 *
 * Implementations compute a human-readable access status value for an item
 * (e.g. open access, embargoed, restricted). The concrete implementation is
 * selected via the DSpace plugin service configuration.
 */
public interface AccessStatusHelper {
    /**
     * Calculate the access status for the item.
     *
     * @param context the DSpace context
     * @param item the item
     * @param threshold the embargo date threshold; presumably dates at or beyond
     *        this value are treated as a "forever" embargo (the caller reads it
     *        from configuration) — TODO confirm against the implementation
     * @return an access status value
     * @throws SQLException An exception that provides information on a database access error or other errors.
     */
    public String getAccessStatusFromItem(Context context, Item item, Date threshold)
        throws SQLException;
}

View File

@@ -0,0 +1,66 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status;
import java.sql.SQLException;
import java.util.Date;
import org.dspace.access.status.service.AccessStatusService;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.core.service.PluginService;
import org.dspace.services.ConfigurationService;
import org.joda.time.LocalDate;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Implementation for the access status calculation service.
 *
 * Delegates the actual status calculation to an {@link AccessStatusHelper}
 * plugin, which is resolved from the DSpace configuration in {@link #init()}
 * together with the "forever" embargo date threshold.
 */
public class AccessStatusServiceImpl implements AccessStatusService {
    // Plugin implementation, set from the DSpace configuration by init().
    protected AccessStatusHelper helper = null;

    // Embargo "forever" threshold date, built from configuration by init().
    protected Date forever_date = null;

    @Autowired(required = true)
    protected ConfigurationService configurationService;

    @Autowired(required = true)
    protected PluginService pluginService;

    /**
     * Initialize the bean (after dependency injection has already taken place).
     * Ensures the configurationService is injected, so that we can get the plugin
     * and the forever embargo date threshold from the configuration.
     * Called by "init-method" in Spring configuration.
     *
     * @throws Exception on generic exception
     */
    public void init() throws Exception {
        // Guard so repeated init() calls do not re-resolve the plugin.
        if (helper == null) {
            helper = (AccessStatusHelper) pluginService.getSinglePlugin(AccessStatusHelper.class);
            if (helper == null) {
                throw new IllegalStateException("The AccessStatusHelper plugin was not defined in "
                    + "DSpace configuration.");
            }

            // Defines the embargo forever date threshold for the access status.
            // Look at EmbargoService.FOREVER for some improvements?
            int year = configurationService.getIntProperty("access.status.embargo.forever.year");
            int month = configurationService.getIntProperty("access.status.embargo.forever.month");
            int day = configurationService.getIntProperty("access.status.embargo.forever.day");

            forever_date = new LocalDate(year, month, day).toDate();
        }
    }

    /**
     * Calculate the access status of the item using the configured helper plugin.
     *
     * @param context the DSpace context
     * @param item the item
     * @return an access status value
     * @throws SQLException if a database error occurs
     */
    @Override
    public String getAccessStatus(Context context, Item item) throws SQLException {
        return helper.getAccessStatusFromItem(context, item, forever_date);
    }
}

View File

@@ -0,0 +1,159 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status;
import java.sql.SQLException;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.Group;
/**
 * Default plugin implementation of the access status helper.
 * The getAccessStatusFromItem method provides a simple logic to
 * calculate the access status of an item based on the policies of
 * the primary or the first bitstream in the original bundle.
 * Users can override this method for enhanced functionality.
 */
public class DefaultAccessStatusHelper implements AccessStatusHelper {
    public static final String EMBARGO = "embargo";
    public static final String METADATA_ONLY = "metadata.only";
    public static final String OPEN_ACCESS = "open.access";
    public static final String RESTRICTED = "restricted";
    public static final String UNKNOWN = "unknown";

    protected ItemService itemService =
        ContentServiceFactory.getInstance().getItemService();
    protected ResourcePolicyService resourcePolicyService =
        AuthorizeServiceFactory.getInstance().getResourcePolicyService();
    protected AuthorizeService authorizeService =
        AuthorizeServiceFactory.getInstance().getAuthorizeService();

    public DefaultAccessStatusHelper() {
        super();
    }

    /**
     * Look at the item's policies to determine an access status value.
     * It also considers a date threshold for embargoes and restrictions.
     *
     * If the item is null, simply returns the "unknown" value.
     *
     * @param context the DSpace context
     * @param item the item to calculate the status for
     * @param threshold the embargo threshold date
     * @return an access status value
     * @throws SQLException if a database error occurs
     */
    @Override
    public String getAccessStatusFromItem(Context context, Item item, Date threshold)
        throws SQLException {
        if (item == null) {
            return UNKNOWN;
        }
        // Consider only the original bundles.
        List<Bundle> bundles = item.getBundles(Constants.DEFAULT_BUNDLE_NAME);
        // Check for primary bitstreams first.
        Bitstream bitstream = bundles.stream()
            .map(Bundle::getPrimaryBitstream)
            .filter(Objects::nonNull)
            .findFirst()
            .orElse(null);
        if (bitstream == null) {
            // If there is no primary bitstream,
            // take the first bitstream in the bundles.
            bitstream = bundles.stream()
                .map(Bundle::getBitstreams)
                .flatMap(List::stream)
                .findFirst()
                .orElse(null);
        }
        return calculateAccessStatusForDso(context, bitstream, threshold);
    }

    /**
     * Look at the DSpace object's policies to determine an access status value.
     *
     * If the object is null, returns the "metadata.only" value.
     * If any policy attached to the object is valid for the anonymous group,
     * returns the "open.access" value.
     * Otherwise, if the policy start date is before the embargo threshold date,
     * returns the "embargo" value.
     * Every other case returns the "restricted" value.
     *
     * @param context the DSpace context
     * @param dso the DSpace object
     * @param threshold the embargo threshold date
     * @return an access status value
     * @throws SQLException if a database error occurs
     */
    private String calculateAccessStatusForDso(Context context, DSpaceObject dso, Date threshold)
        throws SQLException {
        if (dso == null) {
            return METADATA_ONLY;
        }
        // Only consider read policies.
        List<ResourcePolicy> policies = authorizeService
            .getPoliciesActionFilter(context, dso, Constants.READ);
        int openAccessCount = 0;
        int embargoCount = 0;
        int restrictedCount = 0;
        int unknownCount = 0;
        // Looks at all read policies.
        for (ResourcePolicy policy : policies) {
            boolean isValid = resourcePolicyService.isDateValid(policy);
            Group group = policy.getGroup();
            // The group must not be null here. However,
            // if it is, consider this as an unexpected case.
            if (group == null) {
                unknownCount++;
            } else if (StringUtils.equals(group.getName(), Group.ANONYMOUS)) {
                // Only calculate the status for the anonymous group.
                if (isValid) {
                    // If the policy is valid, the anonymous group has access
                    // to the bitstream.
                    openAccessCount++;
                } else {
                    Date startDate = policy.getStartDate();
                    if (startDate != null && !startDate.before(threshold)) {
                        // If the policy start date has a value and if this value
                        // is equal or superior to the configured forever date, the
                        // access status is also restricted.
                        restrictedCount++;
                    } else {
                        // If the current date is not between the policy start date
                        // and end date, the access status is embargo.
                        embargoCount++;
                    }
                }
            }
        }
        // Open access wins over everything; embargo wins unless a permanent
        // restriction was also found; unknown groups are surfaced before
        // falling back to the restricted status.
        if (openAccessCount > 0) {
            return OPEN_ACCESS;
        }
        if (embargoCount > 0 && restrictedCount == 0) {
            return EMBARGO;
        }
        if (unknownCount > 0) {
            return UNKNOWN;
        }
        return RESTRICTED;
    }
}

View File

@@ -0,0 +1,25 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status.factory;
import org.dspace.access.status.service.AccessStatusService;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
 * Abstract factory to get services for the access status package,
 * use AccessStatusServiceFactory.getInstance() to retrieve an implementation.
 */
public abstract class AccessStatusServiceFactory {

    /**
     * Retrieve the access status calculation service.
     *
     * @return the AccessStatusService implementation
     */
    public abstract AccessStatusService getAccessStatusService();

    /**
     * Retrieve the factory implementation registered in the DSpace
     * service manager under the name "accessStatusServiceFactory".
     *
     * @return the configured AccessStatusServiceFactory
     */
    public static AccessStatusServiceFactory getInstance() {
        return DSpaceServicesFactory.getInstance().getServiceManager()
            .getServiceByName("accessStatusServiceFactory", AccessStatusServiceFactory.class);
    }
}

View File

@@ -0,0 +1,26 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status.factory;
import org.dspace.access.status.service.AccessStatusService;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Factory implementation to get services for the access status package,
 * use AccessStatusServiceFactory.getInstance() to retrieve an implementation.
 */
public class AccessStatusServiceFactoryImpl extends AccessStatusServiceFactory {

    // Injected by Spring from the access status service configuration.
    @Autowired(required = true)
    private AccessStatusService accessStatusService;

    @Override
    public AccessStatusService getAccessStatusService() {
        return accessStatusService;
    }
}

View File

@@ -0,0 +1,30 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/**
* <p>
* Access status allows the users to view the bitstreams availability before
* browsing into the item itself.
* </p>
* <p>
* The access status is calculated through a pluggable class:
* {@link org.dspace.access.status.AccessStatusHelper}.
* The {@link org.dspace.access.status.AccessStatusServiceImpl}
* must be configured to specify this class, as well as a forever embargo date
* threshold year, month and day.
* </p>
* <p>
* See {@link org.dspace.access.status.DefaultAccessStatusHelper} for a simple calculation
* based on the primary or the first bitstream of the original bundle. You can
* supply your own class to implement more complex access statuses.
* </p>
* <p>
* For now, the access status is calculated when the item is shown in a list.
* </p>
*/
package org.dspace.access.status;

View File

@@ -0,0 +1,46 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status.service;
import java.sql.SQLException;
import org.dspace.content.Item;
import org.dspace.core.Context;
/**
 * Public interface to the access status subsystem.
 * <p>
 * Configuration properties: (with examples)
 * {@code
 * # values for the forever embargo date threshold
 * # This threshold date is used in the default access status helper to determine if an item is
 * # restricted or embargoed based on the start date of the primary (or first) file policies.
 * # In this case, if the policy start date is earlier than the threshold date, the status will
 * # be embargo, else it will be restricted.
 * # You might want to change this threshold based on your needs. For example: some databases
 * # don't accept a date later than 31 december 9999.
 * access.status.embargo.forever.year = 10000
 * access.status.embargo.forever.month = 1
 * access.status.embargo.forever.day = 1
 * # implementation of access status helper plugin - replace with local implementation if applicable
 * # This default access status helper provides an item status based on the policies of the primary
 * # bitstream (or first bitstream in the original bundles if no primary file is specified).
 * plugin.single.org.dspace.access.status.AccessStatusHelper = org.dspace.access.status.DefaultAccessStatusHelper
 * }
 */
public interface AccessStatusService {

    /**
     * Calculate the access status for an Item while considering the forever embargo date threshold.
     *
     * @param context the DSpace context
     * @param item the item
     * @return an access status value
     * @throws SQLException An exception that provides information on a database access error or other errors.
     */
    public String getAccessStatus(Context context, Item item) throws SQLException;
}

View File

@@ -14,6 +14,7 @@ import java.util.Locale;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.lang3.StringUtils;
import org.dspace.core.Context;
@@ -54,14 +55,14 @@ public final class CreateAdministrator {
protected GroupService groupService;
/**
* For invoking via the command line. If called with no command line arguments,
* For invoking via the command line. If called with no command line arguments,
* it will negotiate with the user for the administrator details
*
* @param argv the command line arguments given
* @throws Exception if error
*/
public static void main(String[] argv)
throws Exception {
throws Exception {
CommandLineParser parser = new DefaultParser();
Options options = new Options();
@@ -69,19 +70,41 @@ public final class CreateAdministrator {
options.addOption("e", "email", true, "administrator email address");
options.addOption("f", "first", true, "administrator first name");
options.addOption("h", "help", false, "explain create-administrator options");
options.addOption("l", "last", true, "administrator last name");
options.addOption("c", "language", true, "administrator language");
options.addOption("p", "password", true, "administrator password");
CommandLine line = parser.parse(options, argv);
CommandLine line = null;
try {
line = parser.parse(options, argv);
} catch (Exception e) {
System.out.println(e.getMessage() + "\nTry \"dspace create-administrator -h\" to print help information.");
System.exit(1);
}
if (line.hasOption("e") && line.hasOption("f") && line.hasOption("l") &&
line.hasOption("c") && line.hasOption("p")) {
line.hasOption("c") && line.hasOption("p")) {
ca.createAdministrator(line.getOptionValue("e"),
line.getOptionValue("f"), line.getOptionValue("l"),
line.getOptionValue("c"), line.getOptionValue("p"));
line.getOptionValue("f"), line.getOptionValue("l"),
line.getOptionValue("c"), line.getOptionValue("p"));
} else if (line.hasOption("h")) {
String header = "\nA command-line tool for creating an initial administrator for setting up a" +
" DSpace site. Unless all the required parameters are passed it will" +
" prompt for an e-mail address, last name, first name and password from" +
" standard input.. An administrator group is then created and the data passed" +
" in used to create an e-person in that group.\n\n";
String footer = "\n";
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp("dspace create-administrator", header, options, footer, true);
return;
} else {
ca.negotiateAdministratorDetails();
ca.negotiateAdministratorDetails(line);
}
}
@@ -91,7 +114,7 @@ public final class CreateAdministrator {
* @throws Exception if error
*/
protected CreateAdministrator()
throws Exception {
throws Exception {
context = new Context();
groupService = EPersonServiceFactory.getInstance().getGroupService();
ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
@@ -103,20 +126,20 @@ public final class CreateAdministrator {
*
* @throws Exception if error
*/
protected void negotiateAdministratorDetails()
throws Exception {
protected void negotiateAdministratorDetails(CommandLine line)
throws Exception {
Console console = System.console();
System.out.println("Creating an initial administrator account");
boolean dataOK = false;
String email = null;
String firstName = null;
String lastName = null;
char[] password1 = null;
char[] password2 = null;
String email = line.getOptionValue('e');
String firstName = line.getOptionValue('f');
String lastName = line.getOptionValue('l');
String language = I18nUtil.getDefaultLocale().getLanguage();
ConfigurationService cfg = DSpaceServicesFactory.getInstance().getConfigurationService();
boolean flag = line.hasOption('p');
char[] password = null;
boolean dataOK = line.hasOption('f') && line.hasOption('e') && line.hasOption('l');
while (!dataOK) {
System.out.print("E-mail address: ");
@@ -147,8 +170,6 @@ public final class CreateAdministrator {
if (lastName != null) {
lastName = lastName.trim();
}
ConfigurationService cfg = DSpaceServicesFactory.getInstance().getConfigurationService();
if (cfg.hasProperty("webui.supported.locales")) {
System.out.println("Select one of the following languages: "
+ cfg.getProperty("webui.supported.locales"));
@@ -163,46 +184,59 @@ public final class CreateAdministrator {
}
}
System.out.println("Password will not display on screen.");
System.out.print("Password: ");
System.out.print("Is the above data correct? (y or n): ");
System.out.flush();
password1 = console.readPassword();
String s = console.readLine();
System.out.print("Again to confirm: ");
System.out.flush();
password2 = console.readPassword();
//TODO real password validation
if (password1.length > 1 && Arrays.equals(password1, password2)) {
// password OK
System.out.print("Is the above data correct? (y or n): ");
System.out.flush();
String s = console.readLine();
if (s != null) {
s = s.trim();
if (s.toLowerCase().startsWith("y")) {
dataOK = true;
}
if (s != null) {
s = s.trim();
if (s.toLowerCase().startsWith("y")) {
dataOK = true;
}
} else {
System.out.println("Passwords don't match");
}
}
if (!flag) {
password = getPassword(console);
if (password == null) {
return;
}
} else {
password = line.getOptionValue("p").toCharArray();
}
// if we make it to here, we are ready to create an administrator
createAdministrator(email, firstName, lastName, language, String.valueOf(password1));
createAdministrator(email, firstName, lastName, language, String.valueOf(password));
//Cleaning arrays that held password
Arrays.fill(password1, ' ');
Arrays.fill(password2, ' ');
}
private char[] getPassword(Console console) {
char[] password1 = null;
char[] password2 = null;
System.out.println("Password will not display on screen.");
System.out.print("Password: ");
System.out.flush();
password1 = console.readPassword();
System.out.print("Again to confirm: ");
System.out.flush();
password2 = console.readPassword();
// TODO real password validation
if (password1.length > 1 && Arrays.equals(password1, password2)) {
// password OK
Arrays.fill(password2, ' ');
return password1;
} else {
System.out.println("Passwords don't match");
return null;
}
}
/**
* Create the administrator with the given details. If the user
* Create the administrator with the given details. If the user
* already exists then they are simply upped to administrator status
*
* @param email the email for the user
@@ -213,8 +247,8 @@ public final class CreateAdministrator {
* @throws Exception if error
*/
protected void createAdministrator(String email, String first, String last,
String language, String pw)
throws Exception {
String language, String pw)
throws Exception {
// Of course we aren't an administrator yet so we need to
// circumvent authorisation
context.turnOffAuthorisationSystem();

View File

@@ -11,13 +11,16 @@ import java.io.IOException;
import java.sql.SQLException;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
@@ -90,7 +93,7 @@ public class MetadataImporter {
public static void main(String[] args)
throws ParseException, SQLException, IOException, TransformerException,
ParserConfigurationException, AuthorizeException, SAXException,
NonUniqueMetadataException, RegistryImportException {
NonUniqueMetadataException, RegistryImportException, XPathExpressionException {
// create an options object and populate it
CommandLineParser parser = new DefaultParser();
@@ -124,8 +127,8 @@ public class MetadataImporter {
* @throws RegistryImportException if import fails
*/
public static void loadRegistry(String file, boolean forceUpdate)
throws SQLException, IOException, TransformerException, ParserConfigurationException,
AuthorizeException, SAXException, NonUniqueMetadataException, RegistryImportException {
throws SQLException, IOException, TransformerException, ParserConfigurationException, AuthorizeException,
SAXException, NonUniqueMetadataException, RegistryImportException, XPathExpressionException {
Context context = null;
try {
@@ -137,7 +140,9 @@ public class MetadataImporter {
Document document = RegistryImporter.loadXML(file);
// Get the nodes corresponding to types
NodeList schemaNodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-schema");
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList schemaNodes = (NodeList) xPath.compile("/dspace-dc-types/dc-schema")
.evaluate(document, XPathConstants.NODESET);
// Add each one as a new format to the registry
for (int i = 0; i < schemaNodes.getLength(); i++) {
@@ -146,7 +151,8 @@ public class MetadataImporter {
}
// Get the nodes corresponding to types
NodeList typeNodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-type");
NodeList typeNodes = (NodeList) xPath.compile("/dspace-dc-types/dc-type")
.evaluate(document, XPathConstants.NODESET);
// Add each one as a new format to the registry
for (int i = 0; i < typeNodes.getLength(); i++) {
@@ -178,8 +184,8 @@ public class MetadataImporter {
* @throws RegistryImportException if import fails
*/
private static void loadSchema(Context context, Node node, boolean updateExisting)
throws SQLException, IOException, TransformerException,
AuthorizeException, NonUniqueMetadataException, RegistryImportException {
throws SQLException, AuthorizeException, NonUniqueMetadataException, RegistryImportException,
XPathExpressionException {
// Get the values
String name = RegistryImporter.getElementData(node, "name");
String namespace = RegistryImporter.getElementData(node, "namespace");
@@ -236,8 +242,8 @@ public class MetadataImporter {
* @throws RegistryImportException if import fails
*/
private static void loadType(Context context, Node node)
throws SQLException, IOException, TransformerException,
AuthorizeException, NonUniqueMetadataException, RegistryImportException {
throws SQLException, IOException, AuthorizeException, NonUniqueMetadataException, RegistryImportException,
XPathExpressionException {
// Get the values
String schema = RegistryImporter.getElementData(node, "schema");
String element = RegistryImporter.getElementData(node, "element");

View File

@@ -0,0 +1,140 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang.time.DateUtils;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.ProcessStatus;
import org.dspace.core.Context;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.scripts.Process;
import org.dspace.scripts.factory.ScriptServiceFactory;
import org.dspace.scripts.service.ProcessService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.utils.DSpace;
/**
 * Script to cleanup the old processes in the specified state.
 *
 * Deletes processes whose status matches the selected flags (-c/-f/-r,
 * completed by default) and whose creation time is older than the
 * configured number of days ({@code process-cleaner.days}, default 14).
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class ProcessCleaner extends DSpaceRunnable<ProcessCleanerConfiguration<ProcessCleaner>> {

    private ConfigurationService configurationService;

    private ProcessService processService;

    private boolean cleanCompleted = false;

    private boolean cleanFailed = false;

    private boolean cleanRunning = false;

    private boolean help = false;

    // Age threshold in days; processes created earlier than (now - days) are deleted.
    private int days;

    @Override
    public void setup() throws ParseException {

        this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
        this.processService = ScriptServiceFactory.getInstance().getProcessService();

        this.help = commandLine.hasOption('h');
        this.cleanFailed = commandLine.hasOption('f');
        this.cleanRunning = commandLine.hasOption('r');
        // Completed processes are cleaned when -c is given or when no status flag is given.
        this.cleanCompleted = commandLine.hasOption('c') || (!cleanFailed && !cleanRunning);

        this.days = configurationService.getIntProperty("process-cleaner.days", 14);

        if (this.days <= 0) {
            throw new IllegalStateException("The number of days must be a positive integer.");
        }

    }

    @Override
    public void internalRun() throws Exception {

        if (help) {
            printHelp();
            return;
        }

        Context context = new Context();

        try {
            context.turnOffAuthorisationSystem();
            performDeletion(context);
        } finally {
            context.restoreAuthSystemState();
            // NOTE(review): complete() also runs when performDeletion throws,
            // committing any partial deletions — confirm this is intended.
            context.complete();
        }

    }

    /**
     * Delete the processes based on the specified statuses and the configured days
     * from their creation.
     *
     * @param context the DSpace context (authorization already disabled by caller)
     * @throws SQLException if a database error occurs
     * @throws IOException if an error occurs removing process artifacts
     * @throws AuthorizeException if the deletion is not authorized
     */
    private void performDeletion(Context context) throws SQLException, IOException, AuthorizeException {

        List<ProcessStatus> statuses = getProcessToDeleteStatuses();
        Date creationDate = calculateCreationDate();

        handler.logInfo("Searching for processes with status: " + statuses);
        List<Process> processes = processService.findByStatusAndCreationTimeOlderThan(context, statuses, creationDate);
        handler.logInfo("Found " + processes.size() + " processes to be deleted");
        for (Process process : processes) {
            processService.delete(context, process);
        }

        handler.logInfo("Process cleanup completed");

    }

    /**
     * Returns the list of Process statuses to be deleted.
     */
    private List<ProcessStatus> getProcessToDeleteStatuses() {
        List<ProcessStatus> statuses = new ArrayList<>();
        if (cleanCompleted) {
            statuses.add(ProcessStatus.COMPLETED);
        }
        if (cleanFailed) {
            statuses.add(ProcessStatus.FAILED);
        }
        if (cleanRunning) {
            statuses.add(ProcessStatus.RUNNING);
        }
        return statuses;
    }

    /**
     * Returns the cutoff creation date: now minus the configured number of days.
     */
    private Date calculateCreationDate() {
        return DateUtils.addDays(new Date(), -days);
    }

    @Override
    @SuppressWarnings("unchecked")
    public ProcessCleanerConfiguration<ProcessCleaner> getScriptConfiguration() {
        return new DSpace().getServiceManager()
            .getServiceByName("process-cleaner", ProcessCleanerConfiguration.class);
    }

}

View File

@@ -5,9 +5,14 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
/**
* Implementations of the Cache type, for various purposes.
* The {@link ProcessCleaner} for CLI.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class ProcessCleanerCli extends ProcessCleaner {
package org.dspace.services.caching.model;
}

View File

@@ -0,0 +1,18 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
/**
 * The {@link ProcessCleanerConfiguration} for CLI.
 *
 * Marker subclass binding the configuration to the CLI runnable
 * {@link ProcessCleanerCli}; all behavior is inherited.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class ProcessCleanerCliConfiguration extends ProcessCleanerConfiguration<ProcessCleanerCli> {
}

View File

@@ -0,0 +1,70 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
import java.sql.SQLException;
import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * The {@link ScriptConfiguration} for the {@link ProcessCleaner} script.
 */
public class ProcessCleanerConfiguration<T extends ProcessCleaner> extends ScriptConfiguration<T> {

    @Autowired
    private AuthorizeService authorizeService;

    private Class<T> dspaceRunnableClass;

    /**
     * Only repository administrators are allowed to run the cleanup script.
     */
    @Override
    public boolean isAllowedToExecute(Context context) {
        try {
            return authorizeService.isAdmin(context);
        } catch (SQLException e) {
            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
        }
    }

    /**
     * Lazily build the command line options. The local variable is named
     * differently from the inherited {@code options} field to avoid shadowing it.
     */
    @Override
    public Options getOptions() {
        if (options == null) {
            Options cleanerOptions = new Options();

            cleanerOptions.addOption("h", "help", false, "help");

            cleanerOptions.addOption("r", "running", false, "delete the process with RUNNING status");
            cleanerOptions.getOption("r").setType(boolean.class);

            cleanerOptions.addOption("f", "failed", false, "delete the process with FAILED status");
            cleanerOptions.getOption("f").setType(boolean.class);

            cleanerOptions.addOption("c", "completed", false,
                "delete the process with COMPLETED status (default if no statuses are specified)");
            cleanerOptions.getOption("c").setType(boolean.class);

            super.options = cleanerOptions;
        }
        return options;
    }

    @Override
    public Class<T> getDspaceRunnableClass() {
        return dspaceRunnableClass;
    }

    @Override
    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
        this.dspaceRunnableClass = dspaceRunnableClass;
    }

}

View File

@@ -13,8 +13,11 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.xpath.XPathAPI;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
@@ -72,9 +75,10 @@ public class RegistryImporter {
* @throws TransformerException if error
*/
public static String getElementData(Node parentElement, String childName)
throws TransformerException {
throws XPathExpressionException {
// Grab the child node
Node childNode = XPathAPI.selectSingleNode(parentElement, childName);
XPath xPath = XPathFactory.newInstance().newXPath();
Node childNode = (Node) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODE);
if (childNode == null) {
// No child node, so no values
@@ -115,9 +119,10 @@ public class RegistryImporter {
* @throws TransformerException if error
*/
public static String[] getRepeatedElementData(Node parentElement,
String childName) throws TransformerException {
String childName) throws XPathExpressionException {
// Grab the child node
NodeList childNodes = XPathAPI.selectNodeList(parentElement, childName);
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList childNodes = (NodeList) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODESET);
String[] data = new String[childNodes.getLength()];

View File

@@ -16,9 +16,12 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.logging.log4j.Logger;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.factory.ContentServiceFactory;
@@ -122,12 +125,13 @@ public class RegistryLoader {
*/
public static void loadBitstreamFormats(Context context, String filename)
throws SQLException, IOException, ParserConfigurationException,
SAXException, TransformerException, AuthorizeException {
SAXException, TransformerException, AuthorizeException, XPathExpressionException {
Document document = loadXML(filename);
// Get the nodes corresponding to formats
NodeList typeNodes = XPathAPI.selectNodeList(document,
"dspace-bitstream-types/bitstream-type");
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList typeNodes = (NodeList) xPath.compile("dspace-bitstream-types/bitstream-type")
.evaluate(document, XPathConstants.NODESET);
// Add each one as a new format to the registry
for (int i = 0; i < typeNodes.getLength(); i++) {
@@ -151,8 +155,7 @@ public class RegistryLoader {
* @throws AuthorizeException if authorization error
*/
private static void loadFormat(Context context, Node node)
throws SQLException, IOException, TransformerException,
AuthorizeException {
throws SQLException, AuthorizeException, XPathExpressionException {
// Get the values
String mimeType = getElementData(node, "mimetype");
String shortDesc = getElementData(node, "short_description");
@@ -231,9 +234,10 @@ public class RegistryLoader {
* @throws TransformerException if transformer error
*/
private static String getElementData(Node parentElement, String childName)
throws TransformerException {
throws XPathExpressionException {
// Grab the child node
Node childNode = XPathAPI.selectSingleNode(parentElement, childName);
XPath xPath = XPathFactory.newInstance().newXPath();
Node childNode = (Node) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODE);
if (childNode == null) {
// No child node, so no values
@@ -274,9 +278,10 @@ public class RegistryLoader {
* @throws TransformerException if transformer error
*/
private static String[] getRepeatedElementData(Node parentElement,
String childName) throws TransformerException {
String childName) throws XPathExpressionException {
// Grab the child node
NodeList childNodes = XPathAPI.selectNodeList(parentElement, childName);
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList childNodes = (NodeList) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODESET);
String[] data = new String[childNodes.getLength()];

View File

@@ -30,6 +30,10 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -38,7 +42,7 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.xpath.XPathAPI;
import org.apache.commons.lang3.StringUtils;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
@@ -52,9 +56,11 @@ import org.dspace.content.service.CommunityService;
import org.dspace.core.Context;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.jdom.Element;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.jdom2.Element;
import org.jdom2.output.Format;
import org.jdom2.output.XMLOutputter;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
@@ -76,6 +82,7 @@ import org.xml.sax.SAXException;
* </community>
* </import_structure>
* }</pre>
*
* <p>
* It can be arbitrarily deep, and supports all the metadata elements
* that make up the community and collection metadata. See the system
@@ -104,12 +111,14 @@ public class StructBuilder {
*/
private static final Map<String, MetadataFieldName> communityMap = new HashMap<>();
protected static CommunityService communityService
protected static final CommunityService communityService
= ContentServiceFactory.getInstance().getCommunityService();
protected static CollectionService collectionService
protected static final CollectionService collectionService
= ContentServiceFactory.getInstance().getCollectionService();
protected static EPersonService ePersonService
protected static final EPersonService ePersonService
= EPersonServiceFactory.getInstance().getEPersonService();
protected static final HandleService handleService
= HandleServiceFactory.getInstance().getHandleService();
/**
* Default constructor
@@ -135,16 +144,18 @@ public class StructBuilder {
* @throws SQLException passed through.
* @throws FileNotFoundException if input or output could not be opened.
* @throws TransformerException if the input document is invalid.
* @throws XPathExpressionException passed through.
*/
public static void main(String[] argv)
throws ParserConfigurationException, SQLException,
FileNotFoundException, IOException, TransformerException {
throws ParserConfigurationException, SQLException,
IOException, TransformerException, XPathExpressionException {
// Define command line options.
Options options = new Options();
options.addOption("h", "help", false, "Print this help message.");
options.addOption("?", "help");
options.addOption("x", "export", false, "Export the current structure as XML.");
options.addOption("k", "keep-handles", false, "Apply Handles from input document.");
options.addOption(Option.builder("e").longOpt("eperson")
.desc("User who is manipulating the repository's structure.")
@@ -206,6 +217,7 @@ public class StructBuilder {
// Export? Import?
if (line.hasOption('x')) { // export
exportStructure(context, outputStream);
outputStream.close();
} else { // Must be import
String input = line.getOptionValue('f');
if (null == input) {
@@ -220,7 +232,12 @@ public class StructBuilder {
inputStream = new FileInputStream(input);
}
importStructure(context, inputStream, outputStream);
boolean keepHandles = options.hasOption("k");
importStructure(context, inputStream, outputStream, keepHandles);
inputStream.close();
outputStream.close();
// save changes from import
context.complete();
}
@@ -233,14 +250,17 @@ public class StructBuilder {
* @param context
* @param input XML which describes the new communities and collections.
* @param output input, annotated with the new objects' identifiers.
* @param keepHandles true if Handles should be set from input.
* @throws IOException
* @throws ParserConfigurationException
* @throws SAXException
* @throws TransformerException
* @throws SQLException
*/
static void importStructure(Context context, InputStream input, OutputStream output)
throws IOException, ParserConfigurationException, SQLException, TransformerException {
static void importStructure(Context context, InputStream input,
OutputStream output, boolean keepHandles)
throws IOException, ParserConfigurationException, SQLException,
TransformerException, XPathExpressionException {
// load the XML
Document document = null;
@@ -258,15 +278,29 @@ public class StructBuilder {
// is properly structured.
try {
validate(document);
} catch (TransformerException ex) {
} catch (XPathExpressionException ex) {
System.err.format("The input document is invalid: %s%n", ex.getMessage());
System.exit(1);
}
// Check for 'identifier' attributes -- possibly output by this class.
NodeList identifierNodes = XPathAPI.selectNodeList(document, "//*[@identifier]");
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList identifierNodes = (NodeList) xPath.compile("//*[@identifier]")
.evaluate(document, XPathConstants.NODESET);
if (identifierNodes.getLength() > 0) {
System.err.println("The input document has 'identifier' attributes, which will be ignored.");
if (!keepHandles) {
System.err.println("The input document has 'identifier' attributes, which will be ignored.");
} else {
for (int i = 0; i < identifierNodes.getLength() ; i++) {
String identifier = identifierNodes.item(i).getAttributes().item(0).getTextContent();
if (handleService.resolveToURL(context, identifier) != null) {
System.err.printf("The input document contains handle %s,"
+ " which is in use already. Aborting...%n",
identifier);
System.exit(1);
}
}
}
}
// load the mappings into the member variable hashmaps
@@ -287,10 +321,11 @@ public class StructBuilder {
Element[] elements = new Element[]{};
try {
// get the top level community list
NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
NodeList first = (NodeList) xPath.compile("/import_structure/community")
.evaluate(document, XPathConstants.NODESET);
// run the import starting with the top level communities
elements = handleCommunities(context, first, null);
elements = handleCommunities(context, first, null, keepHandles);
} catch (TransformerException ex) {
System.err.format("Input content not understood: %s%n", ex.getMessage());
System.exit(1);
@@ -307,7 +342,7 @@ public class StructBuilder {
}
// finally write the string into the output file.
final org.jdom.Document xmlOutput = new org.jdom.Document(root);
final org.jdom2.Document xmlOutput = new org.jdom2.Document(root);
try {
new XMLOutputter().output(xmlOutput, output);
} catch (IOException e) {
@@ -411,7 +446,7 @@ public class StructBuilder {
}
// Now write the structure out.
org.jdom.Document xmlOutput = new org.jdom.Document(rootElement);
org.jdom2.Document xmlOutput = new org.jdom2.Document(rootElement);
try {
XMLOutputter outputter = new XMLOutputter(Format.getPrettyFormat());
outputter.output(xmlOutput, output);
@@ -456,14 +491,16 @@ public class StructBuilder {
* @throws TransformerException if transformer error
*/
private static void validate(org.w3c.dom.Document document)
throws TransformerException {
throws XPathExpressionException {
StringBuilder err = new StringBuilder();
boolean trip = false;
err.append("The following errors were encountered parsing the source XML.\n");
err.append("No changes have been made to the DSpace instance.\n\n");
NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList first = (NodeList) xPath.compile("/import_structure/community")
.evaluate(document, XPathConstants.NODESET);
if (first.getLength() == 0) {
err.append("-There are no top level communities in the source document.");
System.out.println(err.toString());
@@ -493,14 +530,15 @@ public class StructBuilder {
* no errors.
*/
private static String validateCommunities(NodeList communities, int level)
throws TransformerException {
throws XPathExpressionException {
StringBuilder err = new StringBuilder();
boolean trip = false;
String errs = null;
XPath xPath = XPathFactory.newInstance().newXPath();
for (int i = 0; i < communities.getLength(); i++) {
Node n = communities.item(i);
NodeList name = XPathAPI.selectNodeList(n, "name");
NodeList name = (NodeList) xPath.compile("name").evaluate(n, XPathConstants.NODESET);
if (name.getLength() != 1) {
String pos = Integer.toString(i + 1);
err.append("-The level ").append(level)
@@ -510,7 +548,7 @@ public class StructBuilder {
}
// validate sub communities
NodeList subCommunities = XPathAPI.selectNodeList(n, "community");
NodeList subCommunities = (NodeList) xPath.compile("community").evaluate(n, XPathConstants.NODESET);
String comErrs = validateCommunities(subCommunities, level + 1);
if (comErrs != null) {
err.append(comErrs);
@@ -518,7 +556,7 @@ public class StructBuilder {
}
// validate collections
NodeList collections = XPathAPI.selectNodeList(n, "collection");
NodeList collections = (NodeList) xPath.compile("collection").evaluate(n, XPathConstants.NODESET);
String colErrs = validateCollections(collections, level + 1);
if (colErrs != null) {
err.append(colErrs);
@@ -542,14 +580,15 @@ public class StructBuilder {
* @return the errors to be generated by the calling method, or null if none
*/
private static String validateCollections(NodeList collections, int level)
throws TransformerException {
throws XPathExpressionException {
StringBuilder err = new StringBuilder();
boolean trip = false;
String errs = null;
XPath xPath = XPathFactory.newInstance().newXPath();
for (int i = 0; i < collections.getLength(); i++) {
Node n = collections.item(i);
NodeList name = XPathAPI.selectNodeList(n, "name");
NodeList name = (NodeList) xPath.compile("name").evaluate(n, XPathConstants.NODESET);
if (name.getLength() != 1) {
String pos = Integer.toString(i + 1);
err.append("-The level ").append(level)
@@ -609,22 +648,29 @@ public class StructBuilder {
* @param context the context of the request
* @param communities a nodelist of communities to create along with their sub-structures
* @param parent the parent community of the nodelist of communities to create
* @param keepHandles use Handles from input.
* @return an element array containing additional information regarding the
* created communities (e.g. the handles they have been assigned)
*/
private static Element[] handleCommunities(Context context, NodeList communities, Community parent)
throws TransformerException, SQLException, AuthorizeException {
private static Element[] handleCommunities(Context context, NodeList communities,
Community parent, boolean keepHandles)
throws TransformerException, SQLException, AuthorizeException,
XPathExpressionException {
Element[] elements = new Element[communities.getLength()];
XPath xPath = XPathFactory.newInstance().newXPath();
for (int i = 0; i < communities.getLength(); i++) {
Community community;
Element element = new Element("community");
Node tn = communities.item(i);
Node identifier = tn.getAttributes().getNamedItem("identifier");
// create the community or sub community
if (parent != null) {
Community community;
if (null == identifier
|| StringUtils.isBlank(identifier.getNodeValue())
|| !keepHandles) {
community = communityService.create(parent, context);
} else {
community = communityService.create(null, context);
community = communityService.create(parent, context, identifier.getNodeValue());
}
// default the short description to be an empty string
@@ -632,9 +678,8 @@ public class StructBuilder {
MD_SHORT_DESCRIPTION, null, " ");
// now update the metadata
Node tn = communities.item(i);
for (Map.Entry<String, MetadataFieldName> entry : communityMap.entrySet()) {
NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
NodeList nl = (NodeList) xPath.compile(entry.getKey()).evaluate(tn, XPathConstants.NODESET);
if (nl.getLength() == 1) {
communityService.setMetadataSingleValue(context, community,
entry.getValue(), null, getStringValue(nl.item(0)));
@@ -658,6 +703,7 @@ public class StructBuilder {
// but it's here to keep it separate from the create process in
// case
// we want to move it or make it switchable later
Element element = new Element("community");
element.setAttribute("identifier", community.getHandle());
Element nameElement = new Element("name");
@@ -700,12 +746,16 @@ public class StructBuilder {
}
// handle sub communities
NodeList subCommunities = XPathAPI.selectNodeList(tn, "community");
Element[] subCommunityElements = handleCommunities(context, subCommunities, community);
NodeList subCommunities = (NodeList) xPath.compile("community")
.evaluate(tn, XPathConstants.NODESET);
Element[] subCommunityElements = handleCommunities(context,
subCommunities, community, keepHandles);
// handle collections
NodeList collections = XPathAPI.selectNodeList(tn, "collection");
Element[] collectionElements = handleCollections(context, collections, community);
NodeList collections = (NodeList) xPath.compile("collection")
.evaluate(tn, XPathConstants.NODESET);
Element[] collectionElements = handleCollections(context,
collections, community, keepHandles);
int j;
for (j = 0; j < subCommunityElements.length; j++) {
@@ -730,22 +780,33 @@ public class StructBuilder {
* @return an Element array containing additional information about the
* created collections (e.g. the handle)
*/
private static Element[] handleCollections(Context context, NodeList collections, Community parent)
throws TransformerException, SQLException, AuthorizeException {
private static Element[] handleCollections(Context context,
NodeList collections, Community parent, boolean keepHandles)
throws SQLException, AuthorizeException, XPathExpressionException {
Element[] elements = new Element[collections.getLength()];
XPath xPath = XPathFactory.newInstance().newXPath();
for (int i = 0; i < collections.getLength(); i++) {
Element element = new Element("collection");
Collection collection = collectionService.create(context, parent);
Node tn = collections.item(i);
Node identifier = tn.getAttributes().getNamedItem("identifier");
// Create the Collection.
Collection collection;
if (null == identifier
|| StringUtils.isBlank(identifier.getNodeValue())
|| !keepHandles) {
collection = collectionService.create(context, parent);
} else {
collection = collectionService.create(context, parent, identifier.getNodeValue());
}
// default the short description to the empty string
collectionService.setMetadataSingleValue(context, collection,
MD_SHORT_DESCRIPTION, Item.ANY, " ");
// import the rest of the metadata
Node tn = collections.item(i);
for (Map.Entry<String, MetadataFieldName> entry : collectionMap.entrySet()) {
NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
NodeList nl = (NodeList) xPath.compile(entry.getKey()).evaluate(tn, XPathConstants.NODESET);
if (nl.getLength() == 1) {
collectionService.setMetadataSingleValue(context, collection,
entry.getValue(), null, getStringValue(nl.item(0)));
@@ -754,6 +815,7 @@ public class StructBuilder {
collectionService.update(context, collection);
Element element = new Element("collection");
element.setAttribute("identifier", collection.getHandle());
Element nameElement = new Element("name");

View File

@@ -41,10 +41,8 @@ public class MetadataDeletionScriptConfiguration<T extends MetadataDeletion> ext
Options options = new Options();
options.addOption("m", "metadata", true, "metadata field name");
options.getOption("m").setType(String.class);
options.addOption("l", "list", false, "lists the metadata fields that can be deleted");
options.getOption("l").setType(boolean.class);
super.options = options;
}

View File

@@ -54,12 +54,9 @@ public class MetadataExportScriptConfiguration<T extends MetadataExport> extends
Options options = new Options();
options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)");
options.getOption("i").setType(String.class);
options.addOption("a", "all", false,
"include all metadata fields that are not normally changed (e.g. provenance)");
options.getOption("a").setType(boolean.class);
options.addOption("h", "help", false, "help");
options.getOption("h").setType(boolean.class);
super.options = options;

View File

@@ -0,0 +1,170 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.ParseException;
import org.dspace.content.Item;
import org.dspace.content.MetadataDSpaceCsvExportServiceImpl;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.MetadataDSpaceCsvExportService;
import org.dspace.core.Context;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationService;
import org.dspace.discovery.indexobject.IndexableCollection;
import org.dspace.discovery.indexobject.IndexableCommunity;
import org.dspace.discovery.utils.DiscoverQueryBuilder;
import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.sort.SortOption;
import org.dspace.utils.DSpace;
/**
* Metadata exporter to allow the batch export of metadata from a discovery search into a file
*
*/
public class MetadataExportSearch extends DSpaceRunnable<MetadataExportSearchScriptConfiguration> {
    // Handler key under which the resulting CSV stream is registered/written.
    private static final String EXPORT_CSV = "exportCSV";
    // Set when -h/--help is given; internalRun() then only prints usage and returns.
    private boolean help = false;
    // UUID string of the scope container (-s), later resolved to a community or collection.
    private String identifier;
    // Name of the Discovery configuration to use (-c); may be null for the default.
    private String discoveryConfigName;
    // Raw filter expressions from -f, each of the form "<name>,<operator>=<value>".
    private String[] filterQueryStrings;
    // True when -s was supplied and the search must be limited to a scope object.
    private boolean hasScope = false;
    // Free-text discovery query string (-q); may be null.
    private String query;

    // Collaborating services; all are wired up in setup() from the service manager/factories.
    private SearchService searchService;
    private MetadataDSpaceCsvExportService metadataDSpaceCsvExportService;
    private EPersonService ePersonService;
    private DiscoveryConfigurationService discoveryConfigurationService;
    private CommunityService communityService;
    private CollectionService collectionService;
    private DiscoverQueryBuilder queryBuilder;

    /**
     * Look up this script's configuration bean ("metadata-export-search")
     * from the DSpace service manager.
     */
    @Override
    public MetadataExportSearchScriptConfiguration getScriptConfiguration() {
        return new DSpace().getServiceManager()
            .getServiceByName("metadata-export-search", MetadataExportSearchScriptConfiguration.class);
    }

    /**
     * Resolve required services and parse the command-line options
     * (-h, -q, -s, -c, -f) into fields used by {@link #internalRun()}.
     *
     * @throws ParseException passed through from option handling
     */
    @Override
    public void setup() throws ParseException {
        searchService = SearchUtils.getSearchService();
        metadataDSpaceCsvExportService = new DSpace().getServiceManager()
            .getServiceByName(
                MetadataDSpaceCsvExportServiceImpl.class.getCanonicalName(),
                MetadataDSpaceCsvExportService.class
            );
        ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
        discoveryConfigurationService = SearchUtils.getConfigurationService();
        communityService = ContentServiceFactory.getInstance().getCommunityService();
        collectionService = ContentServiceFactory.getInstance().getCollectionService();
        queryBuilder = SearchUtils.getQueryBuilder();

        if (commandLine.hasOption('h')) {
            help = true;
            return;
        }
        if (commandLine.hasOption('q')) {
            query = commandLine.getOptionValue('q');
        }
        if (commandLine.hasOption('s')) {
            hasScope = true;
            identifier = commandLine.getOptionValue('s');
        }
        if (commandLine.hasOption('c')) {
            discoveryConfigName = commandLine.getOptionValue('c');
        }
        if (commandLine.hasOption('f')) {
            filterQueryStrings = commandLine.getOptionValues('f');
        }
    }

    /**
     * Run the discovery search described by the parsed options and stream the
     * matching items' metadata to a CSV file via the script handler.
     */
    @Override
    public void internalRun() throws Exception {
        if (help) {
            loghelpinfo();
            printHelp();
            return;
        }
        handler.logDebug("starting search export");

        IndexableObject dso = null;
        Context context = new Context();
        // Attribute the export to the invoking eperson.
        context.setCurrentUser(ePersonService.find(context, this.getEpersonIdentifier()));

        if (hasScope) {
            dso = resolveScope(context, identifier);
        }

        DiscoveryConfiguration discoveryConfiguration =
            discoveryConfigurationService.getDiscoveryConfiguration(discoveryConfigName);

        List<QueryBuilderSearchFilter> queryBuilderSearchFilters = new ArrayList<>();

        handler.logDebug("processing filter queries");
        if (filterQueryStrings != null) {
            for (String filterQueryString: filterQueryStrings) {
                // Expected shape: "<name>,<operator>=<value>", e.g. "title,contains=sample text".
                String field = filterQueryString.split(",", 2)[0];
                String operator = filterQueryString.split("(,|=)", 3)[1];
                String value = filterQueryString.split("=", 2)[1];
                QueryBuilderSearchFilter queryBuilderSearchFilter =
                    new QueryBuilderSearchFilter(field, operator, value);
                queryBuilderSearchFilters.add(queryBuilderSearchFilter);
            }
        }

        handler.logDebug("building query");
        // NOTE(review): Long.getLong("0") reads the *system property* named "0" and is
        // almost certainly null here; 0L was probably intended as the offset — confirm
        // against DiscoverQueryBuilder.buildQuery's handling of a null offset.
        DiscoverQuery discoverQuery =
            queryBuilder.buildQuery(context, dso, discoveryConfiguration, query, queryBuilderSearchFilters,
                "Item", 10, Long.getLong("0"), null, SortOption.DESCENDING);
        handler.logDebug("creating iterator");

        Iterator<Item> itemIterator = searchService.iteratorSearch(context, dso, discoverQuery);
        handler.logDebug("creating dspacecsv");
        DSpaceCSV dSpaceCSV = metadataDSpaceCsvExportService.export(context, itemIterator, true);
        handler.logDebug("writing to file " + getFileNameOrExportFile());
        handler.writeFilestream(context, getFileNameOrExportFile(), dSpaceCSV.getInputStream(), EXPORT_CSV);
        // NOTE(review): restoreAuthSystemState() has no matching
        // turnOffAuthorisationSystem() call in this method — verify whether the
        // export service disables authorisation on this context.
        context.restoreAuthSystemState();
        context.complete();
    }

    // Logs the script name before printing help.
    // NOTE(review): logs "metadata-export" although this script is
    // metadata-export-search — confirm whether that label is intended.
    protected void loghelpinfo() {
        handler.logInfo("metadata-export");
    }

    /**
     * Default export file name; the CLI subclass overrides this to honour -n.
     */
    protected String getFileNameOrExportFile() {
        return "metadataExportSearch.csv";
    }

    /**
     * Resolve a UUID string to the community or collection it identifies,
     * trying community first, then collection.
     *
     * @param context current DSpace context
     * @param id      UUID string of the scope container
     * @return an indexable wrapper around the resolved container; if neither a
     *         community nor a collection matches, the returned wrapper holds null
     * @throws SQLException passed through from the lookups
     */
    public IndexableObject resolveScope(Context context, String id) throws SQLException {
        UUID uuid = UUID.fromString(id);
        IndexableObject scopeObj = new IndexableCommunity(communityService.find(context, uuid));
        if (scopeObj.getIndexedObject() == null) {
            scopeObj = new IndexableCollection(collectionService.find(context, uuid));
        }
        return scopeObj;
    }
}

View File

@@ -0,0 +1,20 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
/**
* The cli version of the {@link MetadataExportSearch} script
*/
public class MetadataExportSearchCli extends MetadataExportSearch {
    /**
     * Use the export file name supplied on the command line via -n/--filename
     * instead of the hard-coded default.
     *
     * @return the value of the -n option (null if the option was not given)
     */
    @Override
    protected String getFileNameOrExportFile() {
        return commandLine.getOptionValue('n');
    }
}

View File

@@ -0,0 +1,26 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.apache.commons.cli.Options;
/**
* This is the CLI version of the {@link MetadataExportSearchScriptConfiguration} class that handles the
* configuration for the {@link MetadataExportSearchCli} script
*/
public class MetadataExportSearchCliScriptConfiguration
    extends MetadataExportSearchScriptConfiguration<MetadataExportSearchCli> {

    /**
     * Extend the shared options with the CLI-only -n/--filename option.
     *
     * @return the parent's option set with the filename option added
     */
    @Override
    public Options getOptions() {
        Options options = super.getOptions();
        options.addOption("n", "filename", true, "the filename to export to");
        // Declare the option's value type, consistent with the parent's String options.
        options.getOption("n").setType(String.class);
        // Return the reference we mutated; the original returned super.getOptions()
        // again, which only worked because the parent caches the Options instance.
        return options;
    }
}

View File

@@ -0,0 +1,62 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.apache.commons.cli.Options;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
* The {@link ScriptConfiguration} for the {@link MetadataExportSearch} script
*/
public class MetadataExportSearchScriptConfiguration<T extends MetadataExportSearch> extends ScriptConfiguration<T> {

    // Concrete runnable class this configuration instantiates.
    private Class<T> dspaceRunnableclass;

    @Override
    public Class<T> getDspaceRunnableClass() {
        return dspaceRunnableclass;
    }

    @Override
    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
        this.dspaceRunnableclass = dspaceRunnableClass;
    }

    /**
     * This script may be run by any user.
     */
    @Override
    public boolean isAllowedToExecute(Context context) {
        return true;
    }

    /**
     * Lazily build and cache the option set for the metadata-export-search script.
     *
     * @return the cached {@code Options} describing -q, -s, -c, -f and -h
     */
    @Override
    public Options getOptions() {
        if (options == null) {
            // Use a distinctly named local: the original shadowed the inherited
            // 'options' field that the null-check and return statement refer to.
            Options opts = new Options();
            opts.addOption("q", "query", true,
                "The discovery search string to will be used to match records. Not URL encoded");
            opts.getOption("q").setType(String.class);
            opts.addOption("s", "scope", true,
                "UUID of a specific DSpace container (site, community or collection) to which the search has to be " +
                "limited");
            opts.getOption("s").setType(String.class);
            opts.addOption("c", "configuration", true,
                "The name of a Discovery configuration that should be used by this search");
            opts.getOption("c").setType(String.class);
            opts.addOption("f", "filter", true,
                "Advanced search filter that has to be used to filter the result set, with syntax `<:filter-name>," +
                "<:filter-operator>=<:filter-value>`. Not URL encoded. For example `author," +
                "authority=5df05073-3be7-410d-8166-e254369e4166` or `title,contains=sample text`");
            opts.getOption("f").setType(String.class);
            opts.addOption("h", "help", false, "help");
            super.options = opts;
        }
        return options;
    }
}

View File

@@ -598,18 +598,19 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
changes.add(whatHasChanged);
}
if (change) {
//only clear cache if changes have been made.
c.uncacheEntity(wsItem);
c.uncacheEntity(wfItem);
c.uncacheEntity(item);
if (change && (rowCount % configurationService.getIntProperty("bulkedit.change.commit.count", 100) == 0)) {
c.commit();
handler.logInfo(LogHelper.getHeader(c, "metadata_import_commit", "lineNumber=" + rowCount));
}
populateRefAndRowMap(line, item == null ? null : item.getID());
// keep track of current rows processed
rowCount++;
}
if (change) {
c.commit();
}
c.setMode(originalMode);
c.setMode(Context.Mode.READ_ONLY);
// Return the changes
@@ -925,11 +926,10 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
rightItem = item;
}
// Create the relationship
int leftPlace = relationshipService.findNextLeftPlaceByLeftItem(c, leftItem);
int rightPlace = relationshipService.findNextRightPlaceByRightItem(c, rightItem);
Relationship persistedRelationship = relationshipService.create(c, leftItem, rightItem,
foundRelationshipType, leftPlace, rightPlace);
// Create the relationship, appending to the end
Relationship persistedRelationship = relationshipService.create(
c, leftItem, rightItem, foundRelationshipType, -1, -1
);
relationshipService.update(c, persistedRelationship);
}

View File

@@ -19,7 +19,6 @@ public class MetadataImportCliScriptConfiguration extends MetadataImportScriptCo
public Options getOptions() {
Options options = super.getOptions();
options.addOption("e", "email", true, "email address or user id of user (required if adding new items)");
options.getOption("e").setType(String.class);
options.getOption("e").setRequired(true);
super.options = options;
return options;

View File

@@ -59,20 +59,14 @@ public class MetadataImportScriptConfiguration<T extends MetadataImport> extends
options.getOption("f").setRequired(true);
options.addOption("s", "silent", false,
"silent operation - doesn't request confirmation of changes USE WITH CAUTION");
options.getOption("s").setType(boolean.class);
options.addOption("w", "workflow", false, "workflow - when adding new items, use collection workflow");
options.getOption("w").setType(boolean.class);
options.addOption("n", "notify", false,
"notify - when adding new items using a workflow, send notification emails");
options.getOption("n").setType(boolean.class);
options.addOption("v", "validate-only", false,
"validate - just validate the csv, don't run the import");
options.getOption("v").setType(boolean.class);
options.addOption("t", "template", false,
"template - when adding new items, use the collection template (if it exists)");
options.getOption("t").setType(boolean.class);
options.addOption("h", "help", false, "help");
options.getOption("h").setType(boolean.class);
super.options = options;
}

View File

@@ -0,0 +1,32 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.exception;
/**
* This class provides an exception to be used when trying to save a resource
* that already exists.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class ResourceAlreadyExistsException extends RuntimeException {

    private static final long serialVersionUID = 1L;

    /**
     * Create a ResourceAlreadyExistsException with the given error message.
     * (Only the message is carried; no reference to the existing resource is kept.)
     *
     * @param message the error message
     */
    public ResourceAlreadyExistsException(String message) {
        super(message);
    }
}

View File

@@ -43,22 +43,14 @@ public class HarvestScriptConfiguration<T extends Harvest> extends ScriptConfigu
public Options getOptions() {
Options options = new Options();
options.addOption("p", "purge", false, "delete all items in the collection");
options.getOption("p").setType(boolean.class);
options.addOption("r", "run", false, "run the standard harvest procedure");
options.getOption("r").setType(boolean.class);
options.addOption("g", "ping", false, "test the OAI server and set");
options.getOption("g").setType(boolean.class);
options.addOption("s", "setup", false, "Set the collection up for harvesting");
options.getOption("s").setType(boolean.class);
options.addOption("S", "start", false, "start the harvest loop");
options.getOption("S").setType(boolean.class);
options.addOption("R", "reset", false, "reset harvest status on all collections");
options.getOption("R").setType(boolean.class);
options.addOption("P", "purgeCollections", false, "purge all harvestable collections");
options.getOption("P").setType(boolean.class);
options.addOption("o", "reimport", false, "reimport all items in the collection, " +
"this is equivalent to -p -r, purging all items in a collection and reimporting them");
options.getOption("o").setType(boolean.class);
options.addOption("c", "collection", true,
"harvesting collection (handle or id)");
options.addOption("t", "type", true,
@@ -72,7 +64,6 @@ public class HarvestScriptConfiguration<T extends Harvest> extends ScriptConfigu
"crosswalk in dspace.cfg");
options.addOption("h", "help", false, "help");
options.getOption("h").setType(boolean.class);
return options;
}

View File

@@ -0,0 +1,264 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemexport;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.nio.file.Path;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.file.PathUtils;
import org.dspace.app.itemexport.factory.ItemExportServiceFactory;
import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.utils.DSpace;
/**
* Item exporter to create simple AIPs for DSpace content. Currently exports
* individual items, or entire collections. For instructions on use, see
* printUsage() method.
* <P>
* ItemExport creates the simple AIP package that the importer also uses. It
* consists of:
* <P>
* /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin
* core in RDF schema / contents - text file, listing one file per line / file1
* - files contained in the item / file2 / ...
* <P>
* issues -doesn't handle special characters in metadata (needs to turn {@code &'s} into
* {@code &amp;}, etc.)
* <P>
* Modified by David Little, UCSD Libraries 12/21/04 to allow the registration
* of files (bitstreams) into DSpace.
*
* @author David Little
* @author Jay Paz
*/
public class ItemExport extends DSpaceRunnable<ItemExportScriptConfiguration> {

    // Names used for the temporary work directory and the generated archive.
    public static final String TEMP_DIR = "exportSAF";
    public static final String ZIP_NAME = "exportSAFZip";
    public static final String ZIP_FILENAME = "saf-export";
    public static final String ZIP_EXT = "zip";

    // Parsed command-line state, populated by setup() and internalRun().
    protected String typeString = null;
    protected String destDirName = null;
    protected String idString = null;
    protected int seqStart = -1;
    protected int type = -1;
    protected Item item = null;
    protected Collection collection = null;
    protected boolean migrate = false;
    protected boolean zip = false;
    protected String zipFileName = "";
    protected boolean excludeBitstreams = false;
    protected boolean help = false;

    protected static HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
    protected static ItemService itemService = ContentServiceFactory.getInstance().getItemService();
    protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
    protected static final EPersonService epersonService =
            EPersonServiceFactory.getInstance().getEPersonService();

    @Override
    public ItemExportScriptConfiguration getScriptConfiguration() {
        return new DSpace().getServiceManager()
                .getServiceByName("export", ItemExportScriptConfiguration.class);
    }

    /**
     * Parse the command line into the protected fields above. No validation
     * happens here; see {@link #validate()}.
     *
     * @throws ParseException if the command line cannot be parsed
     */
    @Override
    public void setup() throws ParseException {
        help = commandLine.hasOption('h');

        if (commandLine.hasOption('t')) { // type
            typeString = commandLine.getOptionValue('t');

            // any value other than ITEM/COLLECTION leaves type == -1,
            // which validate() rejects later
            if ("ITEM".equals(typeString)) {
                type = Constants.ITEM;
            } else if ("COLLECTION".equals(typeString)) {
                type = Constants.COLLECTION;
            }
        }

        if (commandLine.hasOption('i')) { // id
            idString = commandLine.getOptionValue('i');
        }

        setNumber();

        if (commandLine.hasOption('m')) { // migrate
            migrate = true;
        }

        if (commandLine.hasOption('x')) { // exclude bitstreams
            excludeBitstreams = true;
        }
    }

    /**
     * Resolve the target item or collection from the id/handle, run the
     * export, and complete (or abort) the context.
     *
     * @throws Exception if the target cannot be resolved or the export fails
     */
    @Override
    public void internalRun() throws Exception {
        if (help) {
            printHelp();
            return;
        }

        validate();

        Context context = new Context();
        context.turnOffAuthorisationSystem();

        if (type == Constants.ITEM) {
            // first, is myIDString a handle?
            if (idString.indexOf('/') != -1) {
                item = (Item) handleService.resolveToObject(context, idString);

                // the handle may resolve to a non-item object; treat that as not found
                if ((item == null) || (item.getType() != Constants.ITEM)) {
                    item = null;
                }
            } else {
                item = itemService.find(context, UUID.fromString(idString));
            }

            if (item == null) {
                handler.logError("The item cannot be found: " + idString + " (run with -h flag for details)");
                throw new UnsupportedOperationException("The item cannot be found: " + idString);
            }
        } else {
            if (idString.indexOf('/') != -1) {
                // has a / must be a handle
                collection = (Collection) handleService.resolveToObject(context,
                        idString);

                // ensure it's a collection
                if ((collection == null)
                        || (collection.getType() != Constants.COLLECTION)) {
                    collection = null;
                }
            } else {
                collection = collectionService.find(context, UUID.fromString(idString));
            }

            if (collection == null) {
                handler.logError("The collection cannot be found: " + idString + " (run with -h flag for details)");
                throw new UnsupportedOperationException("The collection cannot be found: " + idString);
            }
        }

        ItemExportService itemExportService = ItemExportServiceFactory.getInstance()
                .getItemExportService();
        try {
            itemExportService.setHandler(handler);
            process(context, itemExportService);
            context.complete();
        } catch (Exception e) {
            context.abort();
            // Rethrow the original exception rather than wrapping it in a new
            // Exception, so callers see the real type and stack trace.
            throw e;
        }
    }

    /**
     * Validate the options
     */
    protected void validate() {
        if (type == -1) {
            handler.logError("The type must be either COLLECTION or ITEM (run with -h flag for details)");
            throw new UnsupportedOperationException("The type must be either COLLECTION or ITEM");
        }

        if (idString == null) {
            handler.logError("The ID must be set to either a database ID or a handle (run with -h flag for details)");
            throw new UnsupportedOperationException("The ID must be set to either a database ID or a handle");
        }
    }

    /**
     * Run the export: resolve the launching EPerson, work out the destination
     * directory and zip file name, export the item(s) as a zip archive and
     * stream the resulting file back through the handler. The temporary work
     * directory is always removed afterwards.
     *
     * @param context the DSpace context
     * @param itemExportService the service performing the actual export
     * @throws Exception if the export or the streaming of the zip fails
     */
    protected void process(Context context, ItemExportService itemExportService) throws Exception {
        setEPerson(context);
        setDestDirName(context, itemExportService);
        setZip(context);

        Iterator<Item> items;
        if (item != null) {
            // single-item export: wrap it in a one-element iterator
            List<Item> myItems = new ArrayList<>();
            myItems.add(item);
            items = myItems.iterator();
        } else {
            handler.logInfo("Exporting from collection: " + idString);
            items = itemService.findByCollection(context, collection);
        }
        itemExportService.exportAsZip(context, items, destDirName, zipFileName,
                seqStart, migrate, excludeBitstreams);

        // Use File.separator for consistency with setDestDirName() rather
        // than reading the "file.separator" system property.
        File zip = new File(destDirName + File.separator + zipFileName);
        try (InputStream is = new FileInputStream(zip)) {
            // write input stream on handler
            handler.writeFilestream(context, ZIP_FILENAME + "." + ZIP_EXT, is, ZIP_NAME);
        } finally {
            // always clean up the temporary work directory, even on failure
            PathUtils.deleteDirectory(Path.of(destDirName));
        }
    }

    /**
     * Set the destination directory option: a fixed subdirectory of the
     * configured export work directory.
     */
    protected void setDestDirName(Context context, ItemExportService itemExportService) throws Exception {
        destDirName = itemExportService.getExportWorkDirectory() + File.separator + TEMP_DIR;
    }

    /**
     * Set the zip option. The script variant always exports as a zip whose
     * name embeds the current user's id; requires setEPerson() to have run
     * first so context.getCurrentUser() is non-null.
     */
    protected void setZip(Context context) {
        zip = true;
        zipFileName = ZIP_FILENAME + "-" + context.getCurrentUser().getID() + "." + ZIP_EXT;
    }

    /**
     * Set the sequence start number (-n), defaulting to 1 when absent.
     */
    protected void setNumber() {
        seqStart = 1;
        if (commandLine.hasOption('n')) { // number
            seqStart = Integer.parseInt(commandLine.getOptionValue('n'));
        }
    }

    /**
     * Resolve the EPerson that launched the script and make it the current
     * user of the context.
     *
     * @param context the DSpace context
     * @throws SQLException if the EPerson lookup fails
     */
    private void setEPerson(Context context) throws SQLException {
        EPerson myEPerson = epersonService.find(context, this.getEpersonIdentifier());

        // check eperson
        if (myEPerson == null) {
            handler.logError("EPerson cannot be found: " + this.getEpersonIdentifier());
            throw new UnsupportedOperationException("EPerson cannot be found: " + this.getEpersonIdentifier());
        }

        context.setCurrentUser(myEPerson);
    }
}

View File

@@ -0,0 +1,96 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemexport;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.content.Item;
import org.dspace.core.Context;
/**
* CLI variant for the {@link ItemExport} class.
* This was done to specify the specific behaviors for the CLI.
*
* @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com)
*/
public class ItemExportCLI extends ItemExport {

    /**
     * Validate the options. On top of the base checks, the CLI variant
     * requires both a destination directory (-d) and a sequence start
     * number (-n).
     */
    @Override
    protected void validate() {
        super.validate();

        setDestDirName();

        if (destDirName == null) {
            handler.logError("The destination directory must be set (run with -h flag for details)");
            throw new UnsupportedOperationException("The destination directory must be set");
        }

        if (seqStart == -1) {
            handler.logError("The sequence start number must be set (run with -h flag for details)");
            throw new UnsupportedOperationException("The sequence start number must be set");
        }
    }

    /**
     * Run the export. Unlike the script variant, the CLI only zips the
     * result when -z was given; otherwise the items are written straight
     * into the destination directory.
     *
     * @param context the DSpace context
     * @param itemExportService the service performing the actual export
     * @throws Exception if the export fails
     */
    @Override
    protected void process(Context context, ItemExportService itemExportService) throws Exception {
        setZip(context);

        if (zip) {
            itemExportService.exportAsZip(context, targetItems(context), destDirName, zipFileName,
                    seqStart, migrate, excludeBitstreams);
        } else if (item != null) {
            // it's only a single item
            itemExportService.exportItem(context, Collections.singletonList(item).iterator(),
                    destDirName, seqStart, migrate, excludeBitstreams);
        } else {
            handler.logInfo("Exporting from collection: " + idString);
            // it's a collection, so do a bunch of items
            itemExportService.exportItem(context, itemService.findByCollection(context, collection),
                    destDirName, seqStart, migrate, excludeBitstreams);
        }
    }

    /**
     * Build the iterator over the item(s) to export: the single target item
     * when one was resolved, otherwise every item of the target collection.
     */
    private Iterator<Item> targetItems(Context context) throws Exception {
        if (item != null) {
            return Collections.singletonList(item).iterator();
        }
        handler.logInfo("Exporting from collection: " + idString);
        return itemService.findByCollection(context, collection);
    }

    /**
     * Read the destination directory from the -d option, if present.
     */
    protected void setDestDirName() {
        if (!commandLine.hasOption('d')) { // dest
            return;
        }
        destDirName = commandLine.getOptionValue('d');
    }

    /**
     * Enable zip output only when -z was given; its value is the zip
     * file name.
     */
    @Override
    protected void setZip(Context context) {
        if (commandLine.hasOption('z')) {
            zip = true;
            zipFileName = commandLine.getOptionValue('z');
        }
    }

    /**
     * Read the sequence start number from -n. No default is applied here:
     * the option is effectively mandatory for the CLI, and validate()
     * rejects a run where it was never set.
     */
    @Override
    protected void setNumber() {
        if (!commandLine.hasOption('n')) { // number
            return;
        }
        seqStart = Integer.parseInt(commandLine.getOptionValue('n'));
    }
}

View File

@@ -0,0 +1,56 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemexport;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
* The {@link ScriptConfiguration} for the {@link ItemExportCLI} script
*
* @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com)
*/
public class ItemExportCLIScriptConfiguration extends ItemExportScriptConfiguration<ItemExportCLI> {

    /**
     * Build the command-line options for the CLI export. Compared to the
     * parent configuration, -d (destination) is added and -n (number) is
     * mandatory.
     */
    @Override
    public Options getOptions() {
        Options options = new Options();

        options.addOption("t", "type", true, "type: COLLECTION or ITEM");
        options.getOption("t").setRequired(true);
        options.addOption("i", "id", true, "ID or handle of thing to export");
        options.getOption("i").setRequired(true);
        options.addOption("d", "dest", true, "destination where you want items to go");
        options.getOption("d").setRequired(true);
        options.addOption("n", "number", true, "sequence number to begin exporting items with");
        options.getOption("n").setRequired(true);
        options.addOption("z", "zip", true, "export as zip file (specify filename e.g. export.zip)");
        options.addOption("m", "migrate", false,
                "export for migration (remove handle and metadata that will be re-created in new system)");
        // as pointed out by Peter Dietz this provides similar functionality to export metadata
        // but it is needed since it directly exports to Simple Archive Format (SAF)
        options.addOption("x", "exclude-bitstreams", false, "do not export bitstreams");
        options.addOption("h", "help", false, "help");

        return options;
    }
}

View File

@@ -1,246 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemexport;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.dspace.app.itemexport.factory.ItemExportServiceFactory;
import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
/**
* Item exporter to create simple AIPs for DSpace content. Currently exports
* individual items, or entire collections. For instructions on use, see
* printUsage() method.
* <P>
* ItemExport creates the simple AIP package that the importer also uses. It
* consists of:
* <P>
* /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin
* core in RDF schema / contents - text file, listing one file per line / file1
* - files contained in the item / file2 / ...
* <P>
* issues -doesn't handle special characters in metadata (needs to turn {@code &'s} into
* {@code &amp;}, etc.)
* <P>
* Modified by David Little, UCSD Libraries 12/21/04 to allow the registration
* of files (bitstreams) into DSpace.
*
* @author David Little
* @author Jay Paz
*/
public class ItemExportCLITool {

    protected static ItemExportService itemExportService = ItemExportServiceFactory.getInstance()
            .getItemExportService();
    protected static HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
    protected static ItemService itemService = ContentServiceFactory.getInstance().getItemService();
    protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();

    /**
     * Default constructor
     */
    private ItemExportCLITool() { }

    /**
     * Command-line entry point: parse the options, resolve the target item
     * or collection, and export it in Simple Archive Format, optionally as
     * a single zip file.
     *
     * @param argv the command-line arguments
     * @throws Exception on a fatal export error
     */
    public static void main(String[] argv) throws Exception {
        // create an options object and populate it
        CommandLineParser parser = new DefaultParser();

        Options options = new Options();

        options.addOption("t", "type", true, "type: COLLECTION or ITEM");
        options.addOption("i", "id", true, "ID or handle of thing to export");
        options.addOption("d", "dest", true,
                "destination where you want items to go");
        options.addOption("m", "migrate", false,
                "export for migration (remove handle and metadata that will be re-created in new system)");
        options.addOption("n", "number", true,
                "sequence number to begin exporting items with");
        options.addOption("z", "zip", true, "export as zip file (specify filename e.g. export.zip)");
        options.addOption("h", "help", false, "help");
        // as pointed out by Peter Dietz this provides similar functionality to export metadata
        // but it is needed since it directly exports to Simple Archive Format (SAF)
        options.addOption("x", "exclude-bitstreams", false, "do not export bitstreams");

        CommandLine line = parser.parse(options, argv);

        String typeString = null;
        String destDirName = null;
        String myIDString = null;
        int seqStart = -1;
        int myType = -1;

        Item myItem = null;
        Collection mycollection = null;

        if (line.hasOption('h')) {
            HelpFormatter myhelp = new HelpFormatter();
            myhelp.printHelp("ItemExport\n", options);
            System.out
                .println("\nfull collection: ItemExport -t COLLECTION -i ID -d dest -n number");
            System.out
                .println("singleitem: ItemExport -t ITEM -i ID -d dest -n number");

            System.exit(0);
        }

        if (line.hasOption('t')) { // type
            typeString = line.getOptionValue('t');

            // any other value leaves myType == -1, rejected below
            if ("ITEM".equals(typeString)) {
                myType = Constants.ITEM;
            } else if ("COLLECTION".equals(typeString)) {
                myType = Constants.COLLECTION;
            }
        }

        if (line.hasOption('i')) { // id
            myIDString = line.getOptionValue('i');
        }

        if (line.hasOption('d')) { // dest
            destDirName = line.getOptionValue('d');
        }

        if (line.hasOption('n')) { // number
            seqStart = Integer.parseInt(line.getOptionValue('n'));
        }

        boolean migrate = false;
        if (line.hasOption('m')) { // migrate
            migrate = true;
        }

        boolean zip = false;
        String zipFileName = "";
        if (line.hasOption('z')) {
            zip = true;
            zipFileName = line.getOptionValue('z');
        }

        boolean excludeBitstreams = false;
        if (line.hasOption('x')) {
            excludeBitstreams = true;
        }

        // now validate the args
        if (myType == -1) {
            System.out
                .println("type must be either COLLECTION or ITEM (-h for help)");
            System.exit(1);
        }

        if (destDirName == null) {
            System.out
                .println("destination directory must be set (-h for help)");
            System.exit(1);
        }

        if (seqStart == -1) {
            System.out
                .println("sequence start number must be set (-h for help)");
            System.exit(1);
        }

        if (myIDString == null) {
            System.out
                .println("ID must be set to either a database ID or a handle (-h for help)");
            System.exit(1);
        }

        Context c = new Context(Context.Mode.READ_ONLY);
        c.turnOffAuthorisationSystem();

        if (myType == Constants.ITEM) {
            // first, is myIDString a handle?
            if (myIDString.indexOf('/') != -1) {
                myItem = (Item) handleService.resolveToObject(c, myIDString);

                // the handle may resolve to a non-item object; treat as not found
                if ((myItem == null) || (myItem.getType() != Constants.ITEM)) {
                    myItem = null;
                }
            } else {
                myItem = itemService.find(c, UUID.fromString(myIDString));
            }

            if (myItem == null) {
                System.out
                    .println("Error, item cannot be found: " + myIDString);
                // Exit here, as the collection branch below does. Continuing
                // with a null item would fall through to the collection export
                // path with a null collection.
                System.exit(1);
            }
        } else {
            if (myIDString.indexOf('/') != -1) {
                // has a / must be a handle
                mycollection = (Collection) handleService.resolveToObject(c,
                        myIDString);

                // ensure it's a collection
                if ((mycollection == null)
                        || (mycollection.getType() != Constants.COLLECTION)) {
                    mycollection = null;
                }
            } else if (myIDString != null) {
                mycollection = collectionService.find(c, UUID.fromString(myIDString));
            }

            if (mycollection == null) {
                System.out.println("Error, collection cannot be found: "
                        + myIDString);
                System.exit(1);
            }
        }

        if (zip) {
            Iterator<Item> items;
            if (myItem != null) {
                List<Item> myItems = new ArrayList<>();
                myItems.add(myItem);
                items = myItems.iterator();
            } else {
                System.out.println("Exporting from collection: " + myIDString);
                items = itemService.findByCollection(c, mycollection);
            }
            itemExportService.exportAsZip(c, items, destDirName, zipFileName, seqStart, migrate, excludeBitstreams);
        } else {
            if (myItem != null) {
                // it's only a single item
                itemExportService
                    .exportItem(c, Collections.singletonList(myItem).iterator(), destDirName, seqStart, migrate,
                            excludeBitstreams);
            } else {
                System.out.println("Exporting from collection: " + myIDString);

                // it's a collection, so do a bunch of items
                Iterator<Item> i = itemService.findByCollection(c, mycollection);
                itemExportService.exportItem(c, i, destDirName, seqStart, migrate, excludeBitstreams);
            }
        }

        c.complete();
    }
}

View File

@@ -0,0 +1,79 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemexport;
import java.sql.SQLException;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;
/**
* The {@link ScriptConfiguration} for the {@link ItemExport} script
*
* @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com)
*/
public class ItemExportScriptConfiguration<T extends ItemExport> extends ScriptConfiguration<T> {

    @Autowired
    private AuthorizeService authorizeService;

    private Class<T> dspaceRunnableClass;

    @Override
    public Class<T> getDspaceRunnableClass() {
        return this.dspaceRunnableClass;
    }

    @Override
    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
        this.dspaceRunnableClass = dspaceRunnableClass;
    }

    /**
     * Only site administrators are allowed to run the item export.
     */
    @Override
    public boolean isAllowedToExecute(final Context context) {
        try {
            return authorizeService.isAdmin(context);
        } catch (SQLException e) {
            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
        }
    }

    /**
     * Build the command-line options for the export script: the target
     * type (-t) and id (-i) are mandatory, everything else is optional.
     */
    @Override
    public Options getOptions() {
        Options options = new Options();

        options.addOption("t", "type", true, "type: COLLECTION or ITEM");
        options.getOption("t").setRequired(true);
        options.addOption("i", "id", true, "ID or handle of thing to export");
        options.getOption("i").setRequired(true);
        options.addOption("n", "number", true, "sequence number to begin exporting items with");
        options.addOption("m", "migrate", false,
                "export for migration (remove handle and metadata that will be re-created in new system)");
        // as pointed out by Peter Dietz this provides similar functionality to export metadata
        // but it is needed since it directly exports to Simple Archive Format (SAF)
        options.addOption("x", "exclude-bitstreams", false, "do not export bitstreams");
        options.addOption("h", "help", false, "help");

        return options;
    }
}

View File

@@ -57,6 +57,7 @@ import org.dspace.core.Utils;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.service.HandleService;
import org.dspace.scripts.handler.DSpaceRunnableHandler;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
@@ -64,17 +65,21 @@ import org.springframework.beans.factory.annotation.Autowired;
* Item exporter to create simple AIPs for DSpace content. Currently exports
* individual items, or entire collections. For instructions on use, see
* printUsage() method.
* <P>
* <p>
* ItemExport creates the simple AIP package that the importer also uses. It
* consists of:
* <P>
* /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin
* core in RDF schema / contents - text file, listing one file per line / file1
* - files contained in the item / file2 / ...
* <P>
* <pre>{@code
* /exportdir/42/ (one directory per item)
* / dublin_core.xml - qualified dublin core in RDF schema
* / contents - text file, listing one file per line
* / file1 - files contained in the item
* / file2
* / ...
* }</pre>
* <p>
* issues -doesn't handle special characters in metadata (needs to turn {@code &'s} into
* {@code &amp;}, etc.)
* <P>
* <p>
* Modified by David Little, UCSD Libraries 12/21/04 to allow the registration
* of files (bitstreams) into DSpace.
*
@@ -97,11 +102,12 @@ public class ItemExportServiceImpl implements ItemExportService {
@Autowired(required = true)
protected ConfigurationService configurationService;
/**
* log4j logger
*/
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemExportServiceImpl.class);
private final Logger log = org.apache.logging.log4j.LogManager.getLogger();
private DSpaceRunnableHandler handler;
protected ItemExportServiceImpl() {
@@ -126,7 +132,7 @@ public class ItemExportServiceImpl implements ItemExportService {
}
}
System.out.println("Beginning export");
logInfo("Beginning export");
while (i.hasNext()) {
if (SUBDIR_LIMIT > 0 && ++counter == SUBDIR_LIMIT) {
@@ -139,7 +145,7 @@ public class ItemExportServiceImpl implements ItemExportService {
}
}
System.out.println("Exporting item to " + mySequenceNumber);
logInfo("Exporting item to " + mySequenceNumber);
Item item = i.next();
exportItem(c, item, fullPath, mySequenceNumber, migrate, excludeBitstreams);
c.uncacheEntity(item);
@@ -155,7 +161,7 @@ public class ItemExportServiceImpl implements ItemExportService {
// now create a subdirectory
File itemDir = new File(destDir + "/" + seqStart);
System.out.println("Exporting Item " + myItem.getID() +
logInfo("Exporting Item " + myItem.getID() +
(myItem.getHandle() != null ? ", handle " + myItem.getHandle() : "") +
" to " + itemDir);
@@ -168,6 +174,7 @@ public class ItemExportServiceImpl implements ItemExportService {
// make it this far, now start exporting
writeMetadata(c, myItem, itemDir, migrate);
writeBitstreams(c, myItem, itemDir, excludeBitstreams);
writeCollections(myItem, itemDir);
if (!migrate) {
writeHandle(c, myItem, itemDir);
}
@@ -225,7 +232,7 @@ public class ItemExportServiceImpl implements ItemExportService {
File outFile = new File(destDir, filename);
System.out.println("Attempting to create file " + outFile);
logInfo("Attempting to create file " + outFile);
if (outFile.createNewFile()) {
BufferedOutputStream out = new BufferedOutputStream(
@@ -343,6 +350,33 @@ public class ItemExportServiceImpl implements ItemExportService {
}
}
/**
 * Create the 'collections' file. List handles of all Collections which
 * contain this Item. The "owning" Collection is listed first.
 *
 * @param item list collections holding this Item.
 * @param destDir write the file here.
 * @throws IOException if the file cannot be created or written.
 */
protected void writeCollections(Item item, File destDir)
    throws IOException {
    File outFile = new File(destDir, "collections");
    // createNewFile() returns false if 'collections' already exists,
    // which is treated as an error below.
    if (outFile.createNewFile()) {
        // NOTE(review): FileWriter uses the platform default charset —
        // confirm handles are ASCII-only or specify UTF-8 explicitly.
        try (PrintWriter out = new PrintWriter(new FileWriter(outFile))) {
            // The owning collection's handle is always the first line.
            // NOTE(review): assumes item.getOwningCollection() is non-null —
            // verify for items without an owning collection (e.g. workflow items).
            String ownerHandle = item.getOwningCollection().getHandle();
            out.println(ownerHandle);
            // Remaining collections follow, one handle per line, skipping
            // the owner so it is not listed twice.
            for (Collection collection : item.getCollections()) {
                String collectionHandle = collection.getHandle();
                if (!collectionHandle.equals(ownerHandle)) {
                    out.println(collectionHandle);
                }
            }
        }
    } else {
        throw new IOException("Cannot create 'collections' in " + destDir);
    }
}
/**
* Create both the bitstreams and the contents file. Any bitstreams that
* were originally registered will be marked in the contents file as such.
@@ -399,7 +433,7 @@ public class ItemExportServiceImpl implements ItemExportService {
File fdirs = new File(destDir + File.separator
+ dirs);
if (!fdirs.exists() && !fdirs.mkdirs()) {
log.error("Unable to create destination directory");
logError("Unable to create destination directory");
}
}
@@ -456,12 +490,12 @@ public class ItemExportServiceImpl implements ItemExportService {
File wkDir = new File(workDir);
if (!wkDir.exists() && !wkDir.mkdirs()) {
log.error("Unable to create working direcory");
logError("Unable to create working direcory");
}
File dnDir = new File(destDirName);
if (!dnDir.exists() && !dnDir.mkdirs()) {
log.error("Unable to create destination directory");
logError("Unable to create destination directory");
}
// export the items using normal export method
@@ -630,11 +664,9 @@ public class ItemExportServiceImpl implements ItemExportService {
Thread go = new Thread() {
@Override
public void run() {
Context context = null;
Context context = new Context();
Iterator<Item> iitems = null;
try {
// create a new dspace context
context = new Context();
// ignore auths
context.turnOffAuthorisationSystem();
@@ -646,7 +678,7 @@ public class ItemExportServiceImpl implements ItemExportService {
String downloadDir = getExportDownloadDirectory(eperson);
File dnDir = new File(downloadDir);
if (!dnDir.exists() && !dnDir.mkdirs()) {
log.error("Unable to create download directory");
logError("Unable to create download directory");
}
Iterator<String> iter = itemsMap.keySet().iterator();
@@ -665,7 +697,7 @@ public class ItemExportServiceImpl implements ItemExportService {
File wkDir = new File(workDir);
if (!wkDir.exists() && !wkDir.mkdirs()) {
log.error("Unable to create working directory");
logError("Unable to create working directory");
}
@@ -756,7 +788,8 @@ public class ItemExportServiceImpl implements ItemExportService {
throw new Exception(
"A dspace.cfg entry for 'org.dspace.app.itemexport.work.dir' does not exist.");
}
return exportDir;
// clean work dir path from duplicate separators
return StringUtils.replace(exportDir, File.separator + File.separator, File.separator);
}
@Override
@@ -884,7 +917,7 @@ public class ItemExportServiceImpl implements ItemExportService {
for (File file : files) {
if (file.lastModified() < now.getTimeInMillis()) {
if (!file.delete()) {
log.error("Unable to delete export file");
logError("Unable to delete export file");
}
}
}
@@ -908,7 +941,7 @@ public class ItemExportServiceImpl implements ItemExportService {
for (File file : files) {
if (file.lastModified() < now.getTimeInMillis()) {
if (!file.delete()) {
log.error("Unable to delete old files");
logError("Unable to delete old files");
}
}
}
@@ -916,7 +949,7 @@ public class ItemExportServiceImpl implements ItemExportService {
// If the directory is now empty then we delete it too.
if (dir.listFiles().length == 0) {
if (!dir.delete()) {
log.error("Unable to delete directory");
logError("Unable to delete directory");
}
}
}
@@ -937,14 +970,14 @@ public class ItemExportServiceImpl implements ItemExportService {
email.send();
} catch (Exception e) {
log.warn(LogHelper.getHeader(context, "emailSuccessMessage", "cannot notify user of export"), e);
logWarn(LogHelper.getHeader(context, "emailSuccessMessage", "cannot notify user of export"), e);
}
}
@Override
public void emailErrorMessage(EPerson eperson, String error)
throws MessagingException {
log.warn("An error occurred during item export, the user will be notified. " + error);
logWarn("An error occurred during item export, the user will be notified. " + error);
try {
Locale supportedLocale = I18nUtil.getEPersonLocale(eperson);
Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "export_error"));
@@ -954,7 +987,7 @@ public class ItemExportServiceImpl implements ItemExportService {
email.send();
} catch (Exception e) {
log.warn("error during item export error notification", e);
logWarn("error during item export error notification", e);
}
}
@@ -969,7 +1002,7 @@ public class ItemExportServiceImpl implements ItemExportService {
}
File targetFile = new File(tempFileName);
if (!targetFile.createNewFile()) {
log.warn("Target file already exists: " + targetFile.getName());
logWarn("Target file already exists: " + targetFile.getName());
}
FileOutputStream fos = new FileOutputStream(tempFileName);
@@ -985,7 +1018,7 @@ public class ItemExportServiceImpl implements ItemExportService {
deleteDirectory(cpFile);
if (!targetFile.renameTo(new File(target))) {
log.error("Unable to rename file");
logError("Unable to rename file");
}
} finally {
if (cpZipOutputStream != null) {
@@ -1018,8 +1051,11 @@ public class ItemExportServiceImpl implements ItemExportService {
return;
}
String strAbsPath = cpFile.getPath();
String strZipEntryName = strAbsPath.substring(strSource
.length() + 1, strAbsPath.length());
int startIndex = strSource.length();
if (!StringUtils.endsWith(strSource, File.separator)) {
startIndex++;
}
String strZipEntryName = strAbsPath.substring(startIndex, strAbsPath.length());
// byte[] b = new byte[ (int)(cpFile.length()) ];
@@ -1058,7 +1094,7 @@ public class ItemExportServiceImpl implements ItemExportService {
deleteDirectory(file);
} else {
if (!file.delete()) {
log.error("Unable to delete file: " + file.getName());
logError("Unable to delete file: " + file.getName());
}
}
}
@@ -1067,4 +1103,64 @@ public class ItemExportServiceImpl implements ItemExportService {
return (path.delete());
}
/**
 * Set the handler that receives log output when this service is driven by a
 * script; once set, the logInfo/logWarn/logError helpers route messages to it
 * instead of the class logger.
 */
@Override
public void setHandler(DSpaceRunnableHandler handler) {
    this.handler = handler;
}
/**
 * Log an informational message (no attached exception).
 *
 * @param message the message to log
 */
private void logInfo(String message) {
    logInfo(message, null);
}

/**
 * Log an informational message, preferring the script handler when one is
 * set. The handler API accepts only the message, so the exception (if any)
 * is reported solely through the class logger fallback.
 *
 * @param message the message to log
 * @param e optional exception to attach; may be null
 */
private void logInfo(String message, Exception e) {
    if (handler != null) {
        handler.logInfo(message);
    } else if (e == null) {
        log.info(message);
    } else {
        log.info(message, e);
    }
}
/**
 * Log a warning message (no attached exception).
 *
 * @param message the message to log
 */
private void logWarn(String message) {
    logWarn(message, null);
}

/**
 * Log a warning message, preferring the script handler when one is set. The
 * handler API accepts only the message, so the exception (if any) is
 * reported solely through the class logger fallback.
 *
 * @param message the message to log
 * @param e optional exception to attach; may be null
 */
private void logWarn(String message, Exception e) {
    if (handler != null) {
        handler.logWarning(message);
    } else if (e == null) {
        log.warn(message);
    } else {
        log.warn(message, e);
    }
}
/**
 * Log an error message (no attached exception).
 *
 * @param message the message to log
 */
private void logError(String message) {
    logError(message, null);
}

/**
 * Log an error message, preferring the script handler when one is set.
 * Unlike the info/warn variants, the handler's error API also accepts the
 * exception, so it is forwarded in both branches.
 *
 * @param message the message to log
 * @param e optional exception to attach; may be null
 */
private void logError(String message, Exception e) {
    if (handler == null) {
        if (e == null) {
            log.error(message);
        } else {
            log.error(message, e);
        }
    } else if (e == null) {
        handler.logError(message);
    } else {
        handler.logError(message, e);
    }
}
}

View File

@@ -17,6 +17,7 @@ import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.scripts.handler.DSpaceRunnableHandler;
/**
* Item exporter to create simple AIPs for DSpace content. Currently exports
@@ -267,4 +268,10 @@ public interface ItemExportService {
*/
public void zip(String strSource, String target) throws Exception;
/**
 * Set the DSpace Runnable Handler
 * @param handler the handler that should receive progress/log output while
 *                this service runs as a script
 */
public void setHandler(DSpaceRunnableHandler handler);
}

View File

@@ -0,0 +1,378 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemimport;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.itemimport.factory.ItemImportServiceFactory;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.utils.DSpace;
/**
* Import items into DSpace. The conventional use is upload files by copying
* them. DSpace writes the item's bitstreams into its assetstore. Metadata is
* also loaded to the DSpace database.
* <P>
* A second use assumes the bitstream files already exist in a storage
* resource accessible to DSpace. In this case the bitstreams are 'registered'.
* That is, the metadata is loaded to the DSpace database and DSpace is given
* the location of the file which is subsumed into DSpace.
* <P>
* The distinction is controlled by the format of lines in the 'contents' file.
* See comments in processContentsFile() below.
* <P>
* Modified by David Little, UCSD Libraries 12/21/04 to
* allow the registration of files (bitstreams) into DSpace.
*/
public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {

    // Name of the temporary directory the uploaded SAF zip is unpacked into
    public static String TEMP_DIR = "importSAF";
    // Base name of the generated mapfile written back through the handler
    public static String MAPFILE_FILENAME = "mapfile";
    // Bitstream type used when storing the mapfile via the handler
    public static String MAPFILE_BITSTREAM_TYPE = "importSAFMapfile";

    // State populated from command-line options in setup()
    protected boolean template = false;
    protected String command = null;
    protected String sourcedir = null;
    protected String mapfile = null;
    protected String eperson = null;
    protected String[] collections = null;
    protected boolean isTest = false;
    protected boolean isExcludeContent = false;
    protected boolean isResume = false;
    protected boolean useWorkflow = false;
    protected boolean useWorkflowSendEmail = false;
    protected boolean isQuiet = false;
    protected boolean commandLineCollections = false;
    protected boolean zip = false;
    protected String zipfilename = null;
    protected boolean help = false;
    // Working directory/file created while unpacking the uploaded zip
    protected File workDir = null;
    private File workFile = null;

    protected static final CollectionService collectionService =
            ContentServiceFactory.getInstance().getCollectionService();
    protected static final EPersonService epersonService =
            EPersonServiceFactory.getInstance().getEPersonService();
    protected static final HandleService handleService =
            HandleServiceFactory.getInstance().getHandleService();

    @Override
    public ItemImportScriptConfiguration getScriptConfiguration() {
        return new DSpace().getServiceManager()
                .getServiceByName("import", ItemImportScriptConfiguration.class);
    }

    /**
     * Parse the command-line options into the fields of this runnable.
     * Note that -a/-r/-d are not mutually exclusive here: if several are
     * given, the last one checked (delete) wins.
     *
     * @throws ParseException if the command line cannot be parsed
     */
    @Override
    public void setup() throws ParseException {
        help = commandLine.hasOption('h');

        if (commandLine.hasOption('a')) {
            command = "add";
        }

        if (commandLine.hasOption('r')) {
            command = "replace";
        }

        if (commandLine.hasOption('d')) {
            command = "delete";
        }

        if (commandLine.hasOption('w')) {
            useWorkflow = true;
            if (commandLine.hasOption('n')) {
                useWorkflowSendEmail = true;
            }
        }

        if (commandLine.hasOption('v')) {
            isTest = true;
            handler.logInfo("**Test Run** - not actually importing items.");
        }

        isExcludeContent = commandLine.hasOption('x');

        if (commandLine.hasOption('p')) {
            template = true;
        }

        if (commandLine.hasOption('c')) { // collections
            collections = commandLine.getOptionValues('c');
            commandLineCollections = true;
        } else {
            handler.logInfo("No collections given. Assuming 'collections' file inside item directory");
        }

        if (commandLine.hasOption('R')) {
            isResume = true;
            handler.logInfo("**Resume import** - attempting to import items not already imported");
        }

        if (commandLine.hasOption('q')) {
            isQuiet = true;
        }

        setZip();
    }

    /**
     * Run the import: resolve the mapfile and eperson, validate the options,
     * resolve the destination collections, then delegate the actual work to
     * {@link ItemImportService}. The unpacked zip and the work dir are always
     * cleaned up in the finally block, and timing info is logged.
     *
     * @throws Exception if the import or the database commit fails
     */
    @Override
    public void internalRun() throws Exception {
        if (help) {
            printHelp();
            return;
        }

        Date startTime = new Date();
        Context context = new Context(Context.Mode.BATCH_EDIT);

        setMapFile();
        validate(context);
        setEPerson(context);

        // check collection
        List<Collection> mycollections = null;
        // don't need to validate collections set if command is "delete"
        // also if no collections are given in the command line
        if (!"delete".equals(command) && commandLineCollections) {
            handler.logInfo("Destination collections:");

            mycollections = new ArrayList<>();

            // validate each collection arg to see if it's a real collection
            for (int i = 0; i < collections.length; i++) {
                Collection collection = null;
                if (collections[i] != null) {
                    // is the ID a handle?
                    if (collections[i].indexOf('/') != -1) {
                        // string has a / so it must be a handle - try and resolve
                        // it
                        collection = ((Collection) handleService
                                .resolveToObject(context, collections[i]));
                    } else {
                        // not a handle, try and treat it as an integer collection database ID
                        collection = collectionService.find(context, UUID.fromString(collections[i]));
                    }
                }

                // was the collection valid?
                if (collection == null
                        || collection.getType() != Constants.COLLECTION) {
                    throw new IllegalArgumentException("Cannot resolve "
                            + collections[i] + " to collection");
                }

                // add resolved collection to list
                mycollections.add(collection);

                // print progress info
                handler.logInfo((i == 0 ? "Owning " : "") + "Collection: " + collection.getName());
            }
        }
        // end validation

        // start
        ItemImportService itemImportService = ItemImportServiceFactory.getInstance()
                .getItemImportService();
        try {
            itemImportService.setTest(isTest);
            itemImportService.setExcludeContent(isExcludeContent);
            itemImportService.setResume(isResume);
            itemImportService.setUseWorkflow(useWorkflow);
            itemImportService.setUseWorkflowSendEmail(useWorkflowSendEmail);
            itemImportService.setQuiet(isQuiet);
            itemImportService.setHandler(handler);

            try {
                // imports run with authorisation disabled; restored when the
                // context completes/aborts
                context.turnOffAuthorisationSystem();
                readZip(context, itemImportService);

                process(context, itemImportService, mycollections);

                // complete all transactions
                context.complete();
            } catch (Exception e) {
                context.abort();
                throw new Exception(
                    "Error committing changes to database: " + e.getMessage() + ", aborting most recent changes", e);
            }

            if (isTest) {
                handler.logInfo("***End of Test Run***");
            }
        } finally {
            // clean work dir
            if (zip) {
                FileUtils.deleteDirectory(new File(sourcedir));
                FileUtils.deleteDirectory(workDir);
            }

            Date endTime = new Date();
            handler.logInfo("Started: " + startTime.getTime());
            handler.logInfo("Ended: " + endTime.getTime());
            handler.logInfo(
                "Elapsed time: " + ((endTime.getTime() - startTime.getTime()) / 1000) + " secs (" + (endTime
                    .getTime() - startTime.getTime()) + " msecs)");
        }
    }

    /**
     * Validate the options
     * @param context the DSpace context (unused here; available to subclasses)
     * @throws UnsupportedOperationException if the option combination is invalid
     */
    protected void validate(Context context) {
        if (command == null) {
            handler.logError("Must run with either add, replace, or remove (run with -h flag for details)");
            throw new UnsupportedOperationException("Must run with either add, replace, or remove");
        }

        // can only resume for adds
        if (isResume && !"add".equals(command)) {
            handler.logError("Resume option only works with the --add command (run with -h flag for details)");
            throw new UnsupportedOperationException("Resume option only works with the --add command");
        }

        if (isResume && StringUtils.isBlank(mapfile)) {
            handler.logError("The mapfile does not exist. ");
            throw new UnsupportedOperationException("The mapfile does not exist");
        }
    }

    /**
     * Process the import: dispatch to add/replace/delete, then stream the
     * resulting mapfile back through the handler and delete the temp files.
     * @param context the DSpace context
     * @param itemImportService the service performing the import
     * @param collections destination collections (may be null for delete or
     *                    when a 'collections' file is used)
     * @throws Exception on any import or I/O failure
     */
    protected void process(Context context, ItemImportService itemImportService,
            List<Collection> collections) throws Exception {
        readMapfile(context);

        if ("add".equals(command)) {
            itemImportService.addItems(context, collections, sourcedir, mapfile, template);
        } else if ("replace".equals(command)) {
            itemImportService.replaceItems(context, collections, sourcedir, mapfile, template);
        } else if ("delete".equals(command)) {
            itemImportService.deleteItems(context, mapfile);
        }

        // write input stream on handler
        File mapFile = new File(mapfile);
        try (InputStream mapfileInputStream = new FileInputStream(mapFile)) {
            handler.writeFilestream(context, MAPFILE_FILENAME, mapfileInputStream, MAPFILE_BITSTREAM_TYPE);
        } finally {
            mapFile.delete();
            workFile.delete();
        }
    }

    /**
     * Read the ZIP archive in SAF format: copy the uploaded file from the
     * handler into the temp work dir and unzip it there. Sets workFile,
     * workDir and sourcedir as side effects.
     * @param context the DSpace context
     * @param itemImportService the service providing the temp work dir / unzip
     * @throws Exception if the named file cannot be found or unzipped
     */
    protected void readZip(Context context, ItemImportService itemImportService) throws Exception {
        Optional<InputStream> optionalFileStream = handler.getFileStream(context, zipfilename);
        if (optionalFileStream.isPresent()) {
            // suffix with the user id so concurrent imports don't collide
            workFile = new File(itemImportService.getTempWorkDir() + File.separator
                    + zipfilename + "-" + context.getCurrentUser().getID());
            FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
            workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR);
            sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
        } else {
            throw new IllegalArgumentException(
                    "Error reading file, the file couldn't be found for filename: " + zipfilename);
        }
    }

    /**
     * Read the mapfile: on resume, copy the previously stored mapfile from the
     * handler to a local temp file and point this.mapfile at it.
     * @param context the DSpace context
     * @throws UnsupportedOperationException if the stored mapfile cannot be read
     */
    protected void readMapfile(Context context) {
        if (isResume) {
            try {
                Optional<InputStream> optionalFileStream = handler.getFileStream(context, mapfile);
                if (optionalFileStream.isPresent()) {
                    File tempFile = File.createTempFile(mapfile, "temp");
                    tempFile.deleteOnExit();
                    FileUtils.copyInputStreamToFile(optionalFileStream.get(), tempFile);
                    mapfile = tempFile.getAbsolutePath();
                }
            } catch (IOException | AuthorizeException e) {
                throw new UnsupportedOperationException("The mapfile does not exist");
            }
        }
    }

    /**
     * Set the mapfile option: reuse the one given via -m when resuming,
     * otherwise create a fresh temp file for it.
     * @throws IOException if the temp file cannot be created
     */
    protected void setMapFile() throws IOException {
        if (isResume && commandLine.hasOption('m')) {
            mapfile = commandLine.getOptionValue('m');
        } else {
            mapfile = Files.createTempFile(MAPFILE_FILENAME, "temp").toString();
        }
    }

    /**
     * Set the zip option; in this (REST-driven) variant the input is always a
     * zip, named by -z.
     */
    protected void setZip() {
        zip = true;
        zipfilename = commandLine.getOptionValue('z');
    }

    /**
     * Set the eperson in the context, resolved from the identifier the script
     * framework supplies.
     * @param context the DSpace context
     * @throws SQLException if the eperson lookup fails
     * @throws UnsupportedOperationException if no matching eperson exists
     */
    protected void setEPerson(Context context) throws SQLException {
        EPerson myEPerson = epersonService.find(context, this.getEpersonIdentifier());

        // check eperson
        if (myEPerson == null) {
            handler.logError("EPerson cannot be found: " + this.getEpersonIdentifier());
            throw new UnsupportedOperationException("EPerson cannot be found: " + this.getEpersonIdentifier());
        }

        context.setCurrentUser(myEPerson);
    }
}

View File

@@ -0,0 +1,143 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemimport;
import java.io.File;
import java.sql.SQLException;
import java.util.List;
import java.util.UUID;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.content.Collection;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
/**
* CLI variant for the {@link ItemImport} class.
* This was done to specify the specific behaviors for the CLI.
*
* @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com)
*/
public class ItemImportCLI extends ItemImport {

    /**
     * Validate the options for the CLI variant. Unlike the REST variant, the
     * eperson is taken from -e and the mapfile/sourcedir come straight from
     * the command line, so they are all checked here.
     *
     * BUGFIX: the filesystem probe of the mapfile is now performed only after
     * the null checks. Previously {@code new File(mapfile)} ran before the
     * {@code mapfile == null} checks, so a missing -m produced a bare
     * NullPointerException instead of the intended error messages (the
     * surrounding try/catch was dead code: the File constructor does not
     * signal non-existence).
     *
     * @param context the DSpace context
     * @throws UnsupportedOperationException if the option combination is invalid
     */
    @Override
    protected void validate(Context context) {
        // can only resume for adds
        if (isResume && !"add".equals(command)) {
            handler.logError("Resume option only works with the --add command (run with -h flag for details)");
            throw new UnsupportedOperationException("Resume option only works with the --add command");
        }

        if (commandLine.hasOption('e')) {
            eperson = commandLine.getOptionValue('e');
        }

        // check eperson identifier (email or id)
        if (eperson == null) {
            handler.logError("An eperson to do the importing must be specified (run with -h flag for details)");
            throw new UnsupportedOperationException("An eperson to do the importing must be specified");
        }

        if (command == null) {
            handler.logError("Must run with either add, replace, or remove (run with -h flag for details)");
            throw new UnsupportedOperationException("Must run with either add, replace, or remove");
        } else if ("add".equals(command) || "replace".equals(command)) {
            if (sourcedir == null) {
                handler.logError("A source directory containing items must be set (run with -h flag for details)");
                throw new UnsupportedOperationException("A source directory containing items must be set");
            }

            if (mapfile == null) {
                handler.logError(
                        "A map file to hold importing results must be specified (run with -h flag for details)");
                throw new UnsupportedOperationException("A map file to hold importing results must be specified");
            }
        } else if ("delete".equals(command)) {
            if (mapfile == null) {
                handler.logError("A map file must be specified (run with -h flag for details)");
                throw new UnsupportedOperationException("A map file must be specified");
            }
        }

        // a pre-existing mapfile on a fresh 'add' means a previous run was
        // aborted: require --resume or deletion of the file
        if (mapfile != null && !isResume && "add".equals(command) && new File(mapfile).exists()) {
            handler.logError("The mapfile " + mapfile + " already exists. "
                    + "Either delete it or use --resume if attempting to resume an aborted import. "
                    + "(run with -h flag for details)");
            throw new UnsupportedOperationException("The mapfile " + mapfile + " already exists");
        }
    }

    /**
     * Process the import by dispatching to add/replace/delete. The CLI keeps
     * the mapfile on disk, so (unlike the parent) nothing is streamed back
     * through the handler and the mapfile is not deleted.
     *
     * @param context the DSpace context
     * @param itemImportService the service performing the import
     * @param collections destination collections (may be null)
     * @throws Exception on any import failure
     */
    @Override
    protected void process(Context context, ItemImportService itemImportService,
            List<Collection> collections) throws Exception {
        if ("add".equals(command)) {
            itemImportService.addItems(context, collections, sourcedir, mapfile, template);
        } else if ("replace".equals(command)) {
            itemImportService.replaceItems(context, collections, sourcedir, mapfile, template);
        } else if ("delete".equals(command)) {
            itemImportService.deleteItems(context, mapfile);
        }
    }

    /**
     * If the input is a zip archive, unzip it from the local sourcedir into a
     * per-user temp work dir; sets workDir and sourcedir as side effects.
     *
     * @param context the DSpace context
     * @param itemImportService the service providing the temp work dir / unzip
     * @throws Exception if unzipping fails
     */
    @Override
    protected void readZip(Context context, ItemImportService itemImportService) throws Exception {
        // If this is a zip archive, unzip it first
        if (zip) {
            workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
                    + File.separator + context.getCurrentUser().getID());
            sourcedir = itemImportService.unzip(
                    new File(sourcedir + File.separator + zipfilename), workDir.getAbsolutePath());
        }
    }

    /**
     * Set the mapfile option straight from -m (no temp file is created; the
     * CLI writes the mapfile where the user asked for it).
     */
    @Override
    protected void setMapFile() {
        if (commandLine.hasOption('m')) {
            mapfile = commandLine.getOptionValue('m');
        }
    }

    /**
     * Set the zip/source options: -s gives the source directory and -z, when
     * present, names a zip file inside it.
     */
    @Override
    protected void setZip() {
        if (commandLine.hasOption('s')) { // source
            sourcedir = commandLine.getOptionValue('s');
        }

        if (commandLine.hasOption('z')) {
            zip = true;
            zipfilename = commandLine.getOptionValue('z');
        }
    }

    /**
     * Set the eperson in the context, resolving -e as an email when it
     * contains '@' and as a UUID otherwise.
     *
     * @param context the DSpace context
     * @throws SQLException if the eperson lookup fails
     * @throws UnsupportedOperationException if no matching eperson exists
     */
    @Override
    protected void setEPerson(Context context) throws SQLException {
        EPerson myEPerson = null;
        if (StringUtils.contains(eperson, '@')) {
            // @ sign, must be an email
            myEPerson = epersonService.findByEmail(context, eperson);
        } else {
            myEPerson = epersonService.find(context, UUID.fromString(eperson));
        }

        // check eperson
        if (myEPerson == null) {
            handler.logError("EPerson cannot be found: " + eperson + " (run with -h flag for details)");
            throw new UnsupportedOperationException("EPerson cannot be found: " + eperson);
        }

        context.setCurrentUser(myEPerson);
    }
}

View File

@@ -0,0 +1,77 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemimport;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
* The {@link ScriptConfiguration} for the {@link ItemImportCLI} script
*
* @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com)
*/
public class ItemImportCLIScriptConfiguration extends ItemImportScriptConfiguration<ItemImportCLI> {

    /**
     * Build the command-line options accepted by the CLI import script.
     * Only -m (mapfile) and -e (eperson) are mandatory.
     */
    @Override
    public Options getOptions() {
        Options options = new Options();
        options.addOption(flag("a", "add", "add items to DSpace"));
        options.addOption(flag("r", "replace", "replace items in mapfile"));
        options.addOption(flag("d", "delete", "delete items listed in mapfile"));
        options.addOption(arg("s", "source", "source of items (directory)", false));
        options.addOption(arg("z", "zip", "name of zip file", false));
        options.addOption(arg("c", "collection", "destination collection(s) Handle or database ID", false));
        options.addOption(arg("m", "mapfile", "mapfile items in mapfile", true));
        options.addOption(arg("e", "eperson", "email of eperson doing importing", true));
        options.addOption(flag("w", "workflow", "send submission through collection's workflow"));
        options.addOption(flag("n", "notify",
                "if sending submissions through the workflow, send notification emails"));
        options.addOption(flag("v", "validate", "test run - do not actually import items"));
        options.addOption(flag("x", "exclude-bitstreams", "do not load or expect content bitstreams"));
        options.addOption(flag("p", "template", "apply template"));
        options.addOption(flag("R", "resume", "resume a failed import (add only)"));
        options.addOption(flag("q", "quiet", "don't display metadata"));
        options.addOption(flag("h", "help", "help"));
        return options;
    }

    // Build an optional boolean (no-argument) option.
    private static Option flag(String opt, String longOpt, String description) {
        return Option.builder(opt).longOpt(longOpt).desc(description)
                .hasArg(false).required(false).build();
    }

    // Build a single-argument option, mandatory when required is true.
    private static Option arg(String opt, String longOpt, String description, boolean required) {
        return Option.builder(opt).longOpt(longOpt).desc(description)
                .hasArg().required(required).build();
    }
}

View File

@@ -1,395 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemimport;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.dspace.app.itemimport.factory.ItemImportServiceFactory;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.content.Collection;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
/**
* Import items into DSpace. The conventional use is upload files by copying
* them. DSpace writes the item's bitstreams into its assetstore. Metadata is
* also loaded to the DSpace database.
* <P>
* A second use assumes the bitstream files already exist in a storage
* resource accessible to DSpace. In this case the bitstreams are 'registered'.
* That is, the metadata is loaded to the DSpace database and DSpace is given
* the location of the file which is subsumed into DSpace.
* <P>
* The distinction is controlled by the format of lines in the 'contents' file.
* See comments in processContentsFile() below.
* <P>
* Modified by David Little, UCSD Libraries 12/21/04 to
* allow the registration of files (bitstreams) into DSpace.
*/
public class ItemImportCLITool {

    // NOTE(review): this standalone main() tool appears to be superseded by
    // the DSpaceRunnable-based ItemImport/ItemImportCLI classes — confirm
    // before extending it.
    private static boolean template = false;

    private static final CollectionService collectionService = ContentServiceFactory.getInstance()
                                                                                    .getCollectionService();
    private static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService();
    private static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();

    /**
     * Default constructor
     */
    private ItemImportCLITool() { }

    /**
     * Command-line entry point: parses options, validates them (exiting with
     * status 1 on bad input), resolves the eperson and destination
     * collections, then runs the add/replace/delete via ItemImportService.
     * Always prints timing info and terminates the JVM via System.exit.
     *
     * @param argv raw command-line arguments
     * @throws Exception if option parsing or service lookup fails fatally
     */
    public static void main(String[] argv) throws Exception {
        Date startTime = new Date();

        int status = 0;

        try {
            // create an options object and populate it
            CommandLineParser parser = new DefaultParser();

            Options options = new Options();

            options.addOption("a", "add", false, "add items to DSpace");
            options.addOption("r", "replace", false, "replace items in mapfile");
            options.addOption("d", "delete", false,
                              "delete items listed in mapfile");
            options.addOption("s", "source", true, "source of items (directory)");
            options.addOption("z", "zip", true, "name of zip file");
            options.addOption("c", "collection", true,
                              "destination collection(s) Handle or database ID");
            options.addOption("m", "mapfile", true, "mapfile items in mapfile");
            options.addOption("e", "eperson", true,
                              "email of eperson doing importing");
            options.addOption("w", "workflow", false,
                              "send submission through collection's workflow");
            options.addOption("n", "notify", false,
                              "if sending submissions through the workflow, send notification emails");
            options.addOption("t", "test", false,
                              "test run - do not actually import items");
            options.addOption("p", "template", false, "apply template");
            options.addOption("R", "resume", false,
                              "resume a failed import (add only)");
            options.addOption("q", "quiet", false, "don't display metadata");

            options.addOption("h", "help", false, "help");

            CommandLine line = parser.parse(options, argv);

            String command = null; // add replace remove, etc
            String sourcedir = null;
            String mapfile = null;
            String eperson = null; // db ID or email
            String[] collections = null; // db ID or handles
            boolean isTest = false;
            boolean isResume = false;
            boolean useWorkflow = false;
            boolean useWorkflowSendEmail = false;
            boolean isQuiet = false;

            if (line.hasOption('h')) {
                HelpFormatter myhelp = new HelpFormatter();
                myhelp.printHelp("ItemImport\n", options);
                System.out
                    .println("\nadding items:    ItemImport -a -e eperson -c collection -s sourcedir -m mapfile");
                System.out
                    .println(
                        "\nadding items from zip file:    ItemImport -a -e eperson -c collection -s sourcedir -z " +
                            "filename.zip -m mapfile");
                System.out
                    .println("replacing items:    ItemImport -r -e eperson -c collection -s sourcedir -m mapfile");
                System.out
                    .println("deleting items:     ItemImport -d -e eperson -m mapfile");
                System.out
                    .println(
                        "If multiple collections are specified, the first collection will be the one that owns the " +
                            "item.");

                System.exit(0);
            }

            // -a/-r/-d are not mutually exclusive: the last one checked wins
            if (line.hasOption('a')) {
                command = "add";
            }

            if (line.hasOption('r')) {
                command = "replace";
            }

            if (line.hasOption('d')) {
                command = "delete";
            }

            if (line.hasOption('w')) {
                useWorkflow = true;
                if (line.hasOption('n')) {
                    useWorkflowSendEmail = true;
                }
            }

            if (line.hasOption('t')) {
                isTest = true;
                System.out.println("**Test Run** - not actually importing items.");
            }

            if (line.hasOption('p')) {
                template = true;
            }

            if (line.hasOption('s')) { // source
                sourcedir = line.getOptionValue('s');
            }

            if (line.hasOption('m')) { // mapfile
                mapfile = line.getOptionValue('m');
            }

            if (line.hasOption('e')) { // eperson
                eperson = line.getOptionValue('e');
            }

            if (line.hasOption('c')) { // collections
                collections = line.getOptionValues('c');
            }

            if (line.hasOption('R')) {
                isResume = true;
                System.out
                    .println("**Resume import** - attempting to import items not already imported");
            }

            if (line.hasOption('q')) {
                isQuiet = true;
            }

            boolean zip = false;
            String zipfilename = "";
            if (line.hasOption('z')) {
                zip = true;
                zipfilename = line.getOptionValue('z');
            }

            //By default assume collections will be given on the command line
            boolean commandLineCollections = true;
            // now validate
            // must have a command set
            if (command == null) {
                System.out
                    .println("Error - must run with either add, replace, or remove (run with -h flag for details)");
                System.exit(1);
            } else if ("add".equals(command) || "replace".equals(command)) {
                if (sourcedir == null) {
                    System.out
                        .println("Error - a source directory containing items must be set");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }

                if (mapfile == null) {
                    System.out
                        .println("Error - a map file to hold importing results must be specified");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }

                if (eperson == null) {
                    System.out
                        .println("Error - an eperson to do the importing must be specified");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }

                if (collections == null) {
                    System.out.println("No collections given. Assuming 'collections' file inside item directory");
                    commandLineCollections = false;
                }
            } else if ("delete".equals(command)) {
                if (eperson == null) {
                    System.out
                        .println("Error - an eperson to do the importing must be specified");
                    System.exit(1);
                }

                if (mapfile == null) {
                    System.out.println("Error - a map file must be specified");
                    System.exit(1);
                }
            }

            // can only resume for adds
            if (isResume && !"add".equals(command)) {
                System.out
                    .println("Error - resume option only works with the --add command");
                System.exit(1);
            }

            // do checks around mapfile - if mapfile exists and 'add' is selected,
            // resume must be chosen
            File myFile = new File(mapfile);

            if (!isResume && "add".equals(command) && myFile.exists()) {
                System.out.println("Error - the mapfile " + mapfile
                                       + " already exists.");
                System.out
                    .println("Either delete it or use --resume if attempting to resume an aborted import.");
                System.exit(1);
            }

            ItemImportService myloader = ItemImportServiceFactory.getInstance().getItemImportService();
            myloader.setTest(isTest);
            myloader.setResume(isResume);
            myloader.setUseWorkflow(useWorkflow);
            myloader.setUseWorkflowSendEmail(useWorkflowSendEmail);
            myloader.setQuiet(isQuiet);

            // create a context
            Context c = new Context(Context.Mode.BATCH_EDIT);

            // find the EPerson, assign to context
            EPerson myEPerson = null;

            if (eperson.indexOf('@') != -1) {
                // @ sign, must be an email
                myEPerson = epersonService.findByEmail(c, eperson);
            } else {
                myEPerson = epersonService.find(c, UUID.fromString(eperson));
            }

            if (myEPerson == null) {
                System.out.println("Error, eperson cannot be found: " + eperson);
                System.exit(1);
            }

            c.setCurrentUser(myEPerson);

            // find collections
            List<Collection> mycollections = null;

            // don't need to validate collections set if command is "delete"
            // also if no collections are given in the command line
            if (!"delete".equals(command) && commandLineCollections) {
                System.out.println("Destination collections:");

                mycollections = new ArrayList<>();

                // validate each collection arg to see if it's a real collection
                for (int i = 0; i < collections.length; i++) {
                    Collection resolved = null;

                    if (collections[i] != null) {
                        // is the ID a handle?
                        if (collections[i].indexOf('/') != -1) {
                            // string has a / so it must be a handle - try and resolve
                            // it
                            resolved = ((Collection) handleService
                                .resolveToObject(c, collections[i]));
                        } else {
                            // not a handle, try and treat it as an integer collection database ID
                            resolved = collectionService.find(c, UUID.fromString(collections[i]));
                        }
                    }

                    // was the collection valid?
                    if ((resolved == null)
                        || (resolved.getType() != Constants.COLLECTION)) {
                        throw new IllegalArgumentException("Cannot resolve "
                                                               + collections[i] + " to collection");
                    }

                    // add resolved collection to list
                    mycollections.add(resolved);

                    // print progress info
                    String owningPrefix = "";

                    if (i == 0) {
                        owningPrefix = "Owning ";
                    }

                    System.out.println(owningPrefix + " Collection: "
                                           + resolved.getName());
                }
            } // end of validating collections

            try {
                // If this is a zip archive, unzip it first
                if (zip) {
                    sourcedir = myloader.unzip(sourcedir, zipfilename);
                }

                // imports run with authorisation disabled
                c.turnOffAuthorisationSystem();

                if ("add".equals(command)) {
                    myloader.addItems(c, mycollections, sourcedir, mapfile, template);
                } else if ("replace".equals(command)) {
                    myloader.replaceItems(c, mycollections, sourcedir, mapfile, template);
                } else if ("delete".equals(command)) {
                    myloader.deleteItems(c, mapfile);
                }

                // complete all transactions
                c.complete();
            } catch (Exception e) {
                c.abort();
                e.printStackTrace();
                System.out.println(e);
                status = 1;
            }

            // Delete the unzipped file
            try {
                if (zip) {
                    System.gc();
                    System.out.println(
                        "Deleting temporary zip directory: " + myloader.getTempWorkDirFile().getAbsolutePath());
                    myloader.cleanupZipTemp();
                }
            } catch (IOException ex) {
                System.out.println("Unable to delete temporary zip archive location: " + myloader.getTempWorkDirFile()
                                                                                                 .getAbsolutePath());
            }

            if (isTest) {
                System.out.println("***End of Test Run***");
            }
        } finally {
            Date endTime = new Date();
            System.out.println("Started: " + startTime.getTime());
            System.out.println("Ended: " + endTime.getTime());
            System.out.println(
                "Elapsed time: " + ((endTime.getTime() - startTime.getTime()) / 1000) + " secs (" + (endTime
                    .getTime() - startTime.getTime()) + " msecs)");
        }

        System.exit(status);
    }
}

View File

@@ -0,0 +1,103 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemimport;
import java.io.InputStream;
import java.sql.SQLException;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;
/**
* The {@link ScriptConfiguration} for the {@link ItemImport} script
*
* @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com)
*/
public class ItemImportScriptConfiguration<T extends ItemImport> extends ScriptConfiguration<T> {

    @Autowired
    private AuthorizeService authorizeService;

    // The DSpaceRunnable implementation this configuration launches; injected by the script framework.
    private Class<T> dspaceRunnableClass;

    @Override
    public Class<T> getDspaceRunnableClass() {
        return dspaceRunnableClass;
    }

    @Override
    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
        this.dspaceRunnableClass = dspaceRunnableClass;
    }

    /**
     * Restrict execution of the item-import script to site administrators.
     *
     * @param context the current DSpace context
     * @return true if the current user is an administrator
     * @throws RuntimeException wrapping any SQLException raised by the admin check
     */
    @Override
    public boolean isAllowedToExecute(final Context context) {
        try {
            return authorizeService.isAdmin(context);
        } catch (SQLException e) {
            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
        }
    }

    /**
     * Build the command-line options accepted by the item-import script.
     * Note: options typed as {@code InputStream.class} ("-z", "-m") denote file
     * inputs handled by the script framework.
     *
     * @return the assembled {@link Options}
     */
    @Override
    public Options getOptions() {
        Options options = new Options();

        // Mode flags: exactly one of add / replace / delete is expected by the script.
        options.addOption(Option.builder("a").longOpt("add")
                .desc("add items to DSpace")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("r").longOpt("replace")
                .desc("replace items in mapfile")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("d").longOpt("delete")
                .desc("delete items listed in mapfile")
                .hasArg(false).required(false).build());

        // The zip archive is the only required option: the script imports from an uploaded zip.
        options.addOption(Option.builder("z").longOpt("zip")
                .desc("name of zip file")
                .type(InputStream.class)
                .hasArg().required().build());
        options.addOption(Option.builder("c").longOpt("collection")
                .desc("destination collection(s) Handle or database ID")
                .hasArg().required(false).build());
        options.addOption(Option.builder("m").longOpt("mapfile")
                .desc("mapfile items in mapfile")
                .type(InputStream.class)
                .hasArg().required(false).build());

        // Workflow / behavior toggles.
        options.addOption(Option.builder("w").longOpt("workflow")
                .desc("send submission through collection's workflow")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("n").longOpt("notify")
                .desc("if sending submissions through the workflow, send notification emails")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("v").longOpt("validate")
                .desc("test run - do not actually import items")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("x").longOpt("exclude-bitstreams")
                .desc("do not load or expect content bitstreams")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("p").longOpt("template")
                .desc("apply template")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("R").longOpt("resume")
                .desc("resume a failed import (add only)")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("q").longOpt("quiet")
                .desc("don't display metadata")
                .hasArg(false).required(false).build());

        options.addOption(Option.builder("h").longOpt("help")
                .desc("help")
                .hasArg(false).required(false).build());

        return options;
    }
}

View File

@@ -16,6 +16,7 @@ import org.dspace.app.itemimport.BatchUpload;
import org.dspace.content.Collection;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.scripts.handler.DSpaceRunnableHandler;
/**
* Import items into DSpace. The conventional use is upload files by copying
@@ -210,6 +211,13 @@ public interface ItemImportService {
*/
public void setTest(boolean isTest);
/**
* Set exclude-content flag.
*
* @param isExcludeContent true or false
*/
public void setExcludeContent(boolean isExcludeContent);
/**
* Set resume flag
*
@@ -235,4 +243,10 @@ public interface ItemImportService {
* @param isQuiet true or false
*/
public void setQuiet(boolean isQuiet);
/**
* Set the DSpace Runnable Handler
 * @param handler the DSpaceRunnableHandler through which script output and logging are routed
*/
public void setHandler(DSpaceRunnableHandler handler);
}

View File

@@ -77,7 +77,7 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
ItemUpdate.pr("Contents bitstream count: " + contents.size());
String[] files = dir.list(ItemUpdate.fileFilter);
List<String> fileList = new ArrayList<String>();
List<String> fileList = new ArrayList<>();
for (String filename : files) {
fileList.add(filename);
ItemUpdate.pr("file: " + filename);
@@ -134,9 +134,6 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
ItemUpdate.pr("contents entry for bitstream: " + ce.toString());
File f = new File(dir, ce.filename);
// get an input stream
BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f));
Bitstream bs = null;
String newBundleName = ce.bundlename;
@@ -173,7 +170,9 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
targetBundle = bundles.iterator().next();
}
bs = bitstreamService.create(context, targetBundle, bis);
try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f));) {
bs = bitstreamService.create(context, targetBundle, bis);
}
bs.setName(context, ce.filename);
// Identify the format

View File

@@ -39,29 +39,34 @@ import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
/**
* Provides some batch editing capabilities for items in DSpace:
* Metadata fields - Add, Delete
* Bitstreams - Add, Delete
* Provides some batch editing capabilities for items in DSpace.
* <ul>
* <li>Metadata fields - Add, Delete</li>
* <li>Bitstreams - Add, Delete</li>
* </ul>
*
* The design has been for compatibility with ItemImporter
* <p>
* The design has been for compatibility with
* {@link org.dspace.app.itemimport.service.ItemImportService}
* in the use of the DSpace archive format which is used to
* specify changes on a per item basis. The directory names
* to correspond to each item are arbitrary and will only be
* used for logging purposes. The reference to the item is
* from a required dc.identifier with the item handle to be
* included in the dublin_core.xml (or similar metadata) file.
* from a required {@code dc.identifier} with the item handle to be
* included in the {@code dublin_core.xml} (or similar metadata) file.
*
* Any combination of these actions is permitted in a single run of this class
* <p>
* Any combination of these actions is permitted in a single run of this class.
* The order of actions is important when used in combination.
* It is the responsibility of the calling class (here, ItemUpdate)
* to register UpdateAction classes in the order to which they are
* It is the responsibility of the calling class (here, {@code ItemUpdate})
* to register {@link UpdateAction} classes in the order which they are
* to be performed.
*
*
* It is unfortunate that so much code needs to be borrowed
* from ItemImport as it is not reusable in private methods, etc.
* Some of this has been placed into the MetadataUtilities class
* for possible reuse elsewhere.
* <p>
* It is unfortunate that so much code needs to be borrowed from
* {@link org.dspace.app.itemimport.service.ItemImportService} as it is not
* reusable in private methods, etc. Some of this has been placed into the
* {@link MetadataUtilities} class for possible reuse elsewhere.
*
* @author W. Hays based on a conceptual design by R. Rodgers
*/
@@ -73,7 +78,7 @@ public class ItemUpdate {
public static final String DELETE_CONTENTS_FILE = "delete_contents";
public static String HANDLE_PREFIX = null;
public static final Map<String, String> filterAliases = new HashMap<String, String>();
public static final Map<String, String> filterAliases = new HashMap<>();
public static boolean verbose = false;
@@ -375,7 +380,7 @@ public class ItemUpdate {
// open and process the source directory
File sourceDir = new File(sourceDirPath);
if ((sourceDir == null) || !sourceDir.exists() || !sourceDir.isDirectory()) {
if (!sourceDir.exists() || !sourceDir.isDirectory()) {
pr("Error, cannot open archive source directory " + sourceDirPath);
throw new Exception("error with archive source directory " + sourceDirPath);
}

View File

@@ -27,10 +27,12 @@ import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.commons.lang3.StringUtils;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
@@ -170,24 +172,21 @@ public class MetadataUtilities {
* @param docBuilder DocumentBuilder
* @param is - InputStream of dublin_core.xml
* @return list of DtoMetadata representing the metadata fields relating to an Item
* @throws SQLException if database error
* @throws IOException if IO error
* @throws ParserConfigurationException if parser config error
* @throws SAXException if XML error
* @throws TransformerException if transformer error
* @throws AuthorizeException if authorization error
*/
public static List<DtoMetadata> loadDublinCore(DocumentBuilder docBuilder, InputStream is)
throws SQLException, IOException, ParserConfigurationException,
SAXException, TransformerException, AuthorizeException {
throws IOException, XPathExpressionException, SAXException {
Document document = docBuilder.parse(is);
List<DtoMetadata> dtomList = new ArrayList<DtoMetadata>();
// Get the schema, for backward compatibility we will default to the
// dublin core schema if the schema name is not available in the import file
String schema = null;
NodeList metadata = XPathAPI.selectNodeList(document, "/dublin_core");
String schema;
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList metadata = (NodeList) xPath.compile("/dublin_core").evaluate(document, XPathConstants.NODESET);
Node schemaAttr = metadata.item(0).getAttributes().getNamedItem("schema");
if (schemaAttr == null) {
schema = MetadataSchemaEnum.DC.getName();
@@ -196,7 +195,7 @@ public class MetadataUtilities {
}
// Get the nodes corresponding to formats
NodeList dcNodes = XPathAPI.selectNodeList(document, "/dublin_core/dcvalue");
NodeList dcNodes = (NodeList) xPath.compile("/dublin_core/dcvalue").evaluate(document, XPathConstants.NODESET);
for (int i = 0; i < dcNodes.getLength(); i++) {
Node n = dcNodes.item(i);

View File

@@ -16,7 +16,7 @@ import java.io.StreamTokenizer;
import java.util.ArrayList;
import java.util.List;
import org.jdom.Document;
import org.jdom2.Document;
/**
* @author mwood

View File

@@ -29,9 +29,9 @@ import org.dspace.scripts.service.ScriptService;
import org.dspace.servicemanager.DSpaceKernelImpl;
import org.dspace.servicemanager.DSpaceKernelInit;
import org.dspace.services.RequestService;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.input.SAXBuilder;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.input.SAXBuilder;
/**
* A DSpace script launcher.

View File

@@ -21,10 +21,10 @@ import java.awt.image.BufferedImage;
*/
public class Brand {
private int brandWidth;
private int brandHeight;
private Font font;
private int xOffset;
private final int brandWidth;
private final int brandHeight;
private final Font font;
private final int xOffset;
/**
* Constructor to set up footer image attributes.
@@ -92,7 +92,7 @@ public class Brand {
* do the text placements and preparatory work for the brand image generation
*
* @param brandImage a BufferedImage object where the image is created
* @param identifier and Identifier object describing what text is to be placed in what
* @param brandText an Identifier object describing what text is to be placed in what
* position within the brand
*/
private void drawImage(BufferedImage brandImage,

View File

@@ -39,7 +39,7 @@ class BrandText {
* its location within a rectangular area.
*
* @param location one of the class location constants e.g. <code>Identifier.BL</code>
* @param the text associated with the location
* @param text text associated with the location
*/
public BrandText(String location, String text) {
this.location = location;

View File

@@ -1,99 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.IOUtils;
import org.apache.logging.log4j.Logger;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.hssf.extractor.ExcelExtractor;
import org.apache.poi.xssf.extractor.XSSFExcelExtractor;
import org.dspace.content.Item;
/*
* ExcelFilter
*
* Entries you must add to dspace.cfg:
*
* filter.plugins = blah, \
* Excel Text Extractor
*
* plugin.named.org.dspace.app.mediafilter.FormatFilter = \
* blah = blah, \
* org.dspace.app.mediafilter.ExcelFilter = Excel Text Extractor
*
* #Configure each filter's input Formats
* filter.org.dspace.app.mediafilter.ExcelFilter.inputFormats = Microsoft Excel, Microsoft Excel XML
*
*/
public class ExcelFilter extends MediaFilter {

    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(ExcelFilter.class);

    public String getFilteredName(String oldFilename) {
        // Derived bitstream is the extracted plain text of the spreadsheet.
        return oldFilename + ".txt";
    }

    /**
     * @return String bundle name
     */
    public String getBundleName() {
        return "TEXT";
    }

    /**
     * @return String bitstream format
     */
    public String getFormatString() {
        return "Text";
    }

    /**
     * @return String description
     */
    public String getDescription() {
        return "Extracted text";
    }

    /**
     * Extract the text of an Excel workbook (xls or xlsx) as a UTF-8 stream.
     *
     * @param item item
     * @param source source input stream
     * @param verbose verbose mode
     * @return InputStream the resulting input stream, or null if the content is
     *         not a recognized Excel format
     * @throws Exception if error
     */
    @Override
    public InputStream getDestinationStream(Item item, InputStream source, boolean verbose)
        throws Exception {
        String text = null;
        try {
            POITextExtractor extractor = ExtractorFactory.createExtractor(source);
            // Both the legacy (.xls) and the OOXML (.xlsx) extractors expose getText();
            // any other extractor type is ignored and yields no output.
            boolean isSpreadsheet = extractor instanceof ExcelExtractor
                    || extractor instanceof XSSFExcelExtractor;
            if (isSpreadsheet) {
                text = extractor.getText();
            }
        } catch (Exception e) {
            log.error("Error filtering bitstream: " + e.getMessage(), e);
            throw e;
        }
        if (text == null) {
            return null;
        }
        // Wrap the extracted text as a UTF-8 byte stream.
        return IOUtils.toInputStream(text, StandardCharsets.UTF_8);
    }
}

View File

@@ -1,82 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import javax.swing.text.Document;
import javax.swing.text.html.HTMLEditorKit;
import org.dspace.content.Item;
/*
*
* to do: helpful error messages - can't find mediafilter.cfg - can't
* instantiate filter - bitstream format doesn't exist
*
*/
public class HTMLFilter extends MediaFilter {

    @Override
    public String getFilteredName(String oldFilename) {
        // The derived bitstream holds the page's extracted plain text.
        return oldFilename + ".txt";
    }

    /**
     * @return String bundle name
     */
    @Override
    public String getBundleName() {
        return "TEXT";
    }

    /**
     * @return String bitstream format
     */
    @Override
    public String getFormatString() {
        return "Text";
    }

    /**
     * @return String description
     */
    @Override
    public String getDescription() {
        return "Extracted text";
    }

    /**
     * Parse the HTML source with Swing's HTMLEditorKit and return its text
     * content as a UTF-8 byte stream.
     *
     * @param currentItem item
     * @param source source input stream
     * @param verbose verbose mode
     * @return InputStream the resulting input stream
     * @throws Exception if error
     */
    @Override
    public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
        throws Exception {
        // Parse the document, ignoring any charset directive inside the HTML;
        // the input stream is assumed to already be decoded correctly.
        HTMLEditorKit editorKit = new HTMLEditorKit();
        Document htmlDoc = editorKit.createDefaultDocument();
        htmlDoc.putProperty("IgnoreCharsetDirective", Boolean.TRUE);
        editorKit.read(source, htmlDoc, 0);

        // Pull the full text content out of the parsed document model.
        String plainText = htmlDoc.getText(0, htmlDoc.getLength());

        // Return the extracted text encoded as UTF-8.
        return new ByteArrayInputStream(plainText.getBytes(StandardCharsets.UTF_8));
    }
}

View File

@@ -14,6 +14,9 @@ import java.io.InputStream;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.common.PDRectangle;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Item;
@@ -119,6 +122,39 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
// Optionally override ImageMagick's default density of 72 DPI to use a
// "supersample" when creating the PDF thumbnail. Note that I prefer to
// use the getProperty() method here instead of getIntPropert() because
// the latter always returns an integer (0 in the case it's not set). I
// would prefer to keep ImageMagick's default to itself rather than for
// us to set one. Also note that the density option *must* come before
// we open the input file.
String density = configurationService.getProperty(PRE + ".density");
if (density != null) {
op.density(Integer.valueOf(density));
}
// Check the PDF's MediaBox and CropBox to see if they are the same.
// If not, then tell ImageMagick to use the CropBox when generating
// the thumbnail because the CropBox is generally used to define the
// area displayed when a user opens the PDF on a screen, whereas the
// MediaBox is used for print. Not all PDFs set these correctly, so
// we can use ImageMagick's default behavior unless we see an explicit
// CropBox. Note: we don't need to do anything special to detect if
// the CropBox is missing or empty because pdfbox will set it to the
// same size as the MediaBox if it doesn't exist. Also note that we
// only need to check the first page, since that's what we use for
// generating the thumbnail (PDDocument uses a zero-based index).
PDPage pdfPage = PDDocument.load(f).getPage(0);
PDRectangle pdfPageMediaBox = pdfPage.getMediaBox();
PDRectangle pdfPageCropBox = pdfPage.getCropBox();
// This option must come *before* we open the input file.
if (pdfPageCropBox != pdfPageMediaBox) {
op.define("pdf:use-cropbox=true");
}
String s = "[" + page + "]";
op.addImage(f.getAbsolutePath() + s);
if (configurationService.getBooleanProperty(PRE + ".flatten", true)) {

View File

@@ -50,15 +50,11 @@ public class MediaFilterScriptConfiguration<T extends MediaFilterScript> extends
public Options getOptions() {
Options options = new Options();
options.addOption("v", "verbose", false, "print all extracted text and other details to STDOUT");
options.getOption("v").setType(boolean.class);
options.addOption("q", "quiet", false, "do not print anything except in the event of errors.");
options.getOption("q").setType(boolean.class);
options.addOption("f", "force", false, "force all bitstreams to be processed");
options.getOption("f").setType(boolean.class);
options.addOption("i", "identifier", true, "ONLY process bitstreams belonging to identifier");
options.addOption("m", "maximum", true, "process no more than maximum items");
options.addOption("h", "help", false, "help");
options.getOption("h").setType(boolean.class);
Option pluginOption = Option.builder("p")
.longOpt("plugins")

View File

@@ -1,137 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import org.apache.logging.log4j.Logger;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.encryption.InvalidPasswordException;
import org.apache.pdfbox.text.PDFTextStripper;
import org.dspace.content.Item;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
/*
*
* to do: helpful error messages - can't find mediafilter.cfg - can't
* instantiate filter - bitstream format doesn't exist
*
*/
/**
 * Extract the text of a PDF bitstream using PDFBox's PDFTextStripper.
 * Depending on the {@code pdffilter.largepdfs} configuration flag, the
 * extracted text is buffered either in memory or in a temporary file.
 */
public class PDFFilter extends MediaFilter {

    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PDFFilter.class);

    @Override
    public String getFilteredName(String oldFilename) {
        // Derived bitstream is the extracted plain text of the PDF.
        return oldFilename + ".txt";
    }

    /**
     * @return String bundle name
     */
    @Override
    public String getBundleName() {
        return "TEXT";
    }

    /**
     * @return String bitstreamformat
     */
    @Override
    public String getFormatString() {
        return "Text";
    }

    /**
     * @return String description
     */
    @Override
    public String getDescription() {
        return "Extracted text";
    }

    /**
     * Extract the PDF's text in reading order.
     *
     * @param currentItem item
     * @param source source input stream
     * @param verbose verbose mode
     * @return InputStream the resulting input stream; null if the PDF is
     *         encrypted, or if an OutOfMemoryError occurred and
     *         {@code pdffilter.skiponmemoryexception} is enabled
     * @throws Exception if error
     */
    @Override
    public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
        throws Exception {
        ConfigurationService configurationService
            = DSpaceServicesFactory.getInstance().getConfigurationService();
        try {
            // When pdffilter.largepdfs is true, spool extracted text to a temp
            // file instead of holding it in memory.
            boolean useTemporaryFile = configurationService.getBooleanProperty("pdffilter.largepdfs", false);

            // get input stream from bitstream
            // pass to filter, get string back
            PDFTextStripper pts = new PDFTextStripper();
            pts.setSortByPosition(true);
            PDDocument pdfDoc = null;
            Writer writer = null;
            File tempTextFile = null;
            ByteArrayOutputStream byteStream = null;
            if (useTemporaryFile) {
                tempTextFile = File.createTempFile("dspacepdfextract" + source.hashCode(), ".txt");
                tempTextFile.deleteOnExit();
                writer = new OutputStreamWriter(new FileOutputStream(tempTextFile));
            } else {
                // In-memory path: collect the text into a byte buffer.
                byteStream = new ByteArrayOutputStream();
                writer = new OutputStreamWriter(byteStream);
            }
            try {
                pdfDoc = PDDocument.load(source);
                pts.writeText(pdfDoc, writer);
            } catch (InvalidPasswordException ex) {
                // Encrypted PDFs cannot be stripped; skip them rather than fail the run.
                log.error("PDF is encrypted. Cannot extract text (item: {})",
                    () -> currentItem.getHandle());
                return null;
            } finally {
                // Close the document and writer independently so a failure in
                // one does not prevent closing the other.
                try {
                    if (pdfDoc != null) {
                        pdfDoc.close();
                    }
                } catch (Exception e) {
                    log.error("Error closing PDF file: " + e.getMessage(), e);
                }

                try {
                    writer.close();
                } catch (Exception e) {
                    log.error("Error closing temporary extract file: " + e.getMessage(), e);
                }
            }

            if (useTemporaryFile) {
                return new FileInputStream(tempTextFile);
            } else {
                byte[] bytes = byteStream.toByteArray();
                return new ByteArrayInputStream(bytes);
            }
        } catch (OutOfMemoryError oome) {
            // Very large PDFs can exhaust the heap; optionally skip instead of aborting.
            log.error("Error parsing PDF document " + oome.getMessage(), oome);
            if (!configurationService.getBooleanProperty("pdffilter.skiponmemoryexception", false)) {
                throw oome;
            }
        }
        return null;
    }
}

View File

@@ -1,72 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.openxml4j.exceptions.OpenXML4JException;
import org.apache.xmlbeans.XmlException;
import org.dspace.content.Item;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Extract flat text from Microsoft Word documents (.doc, .docx).
*/
public class PoiWordFilter extends MediaFilter {

    private static final Logger LOG = LoggerFactory.getLogger(PoiWordFilter.class);

    @Override
    public String getFilteredName(String oldFilename) {
        // Derived bitstream holds the document's extracted plain text.
        return oldFilename + ".txt";
    }

    @Override
    public String getBundleName() {
        return "TEXT";
    }

    @Override
    public String getFormatString() {
        return "Text";
    }

    @Override
    public String getDescription() {
        return "Extracted text";
    }

    /**
     * Extract the flat text of a Word document (.doc or .docx) via POI and
     * return it as a UTF-8 byte stream.
     */
    @Override
    public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
        throws Exception {
        String extracted;
        try {
            // POI picks the appropriate extractor for the detected format.
            POITextExtractor poiExtractor = ExtractorFactory.createExtractor(source);
            extracted = poiExtractor.getText();
        } catch (IOException | OpenXML4JException | XmlException e) {
            System.err.format("Invalid File Format: %s%n", e.getMessage());
            LOG.error("Unable to parse the bitstream: ", e);
            throw e;
        }

        // if verbose flag is set, print out extracted text to STDOUT
        if (verbose) {
            System.out.println(extracted);
        }

        // return the extracted text as a stream.
        return new ByteArrayInputStream(extracted.getBytes(StandardCharsets.UTF_8));
    }
}

View File

@@ -1,113 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.apache.logging.log4j.Logger;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.hslf.extractor.PowerPointExtractor;
import org.apache.poi.xslf.extractor.XSLFPowerPointExtractor;
import org.dspace.content.Item;
/*
* TODO: Allow user to configure extraction of only text or only notes
*
*/
public class PowerPointFilter extends MediaFilter {

    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PowerPointFilter.class);

    @Override
    public String getFilteredName(String oldFilename) {
        // Derived bitstream is the extracted plain text of the presentation.
        return oldFilename + ".txt";
    }

    /**
     * @return String bundle name
     */
    @Override
    public String getBundleName() {
        return "TEXT";
    }

    /**
     * @return String bitstream format
     *
     * TODO: Check that this is correct
     */
    @Override
    public String getFormatString() {
        return "Text";
    }

    /**
     * @return String description
     */
    @Override
    public String getDescription() {
        return "Extracted text";
    }

    /**
     * Extract slide (and notes) text from a PowerPoint file, handling both the
     * legacy (.ppt) and XML-based (.pptx) formats.
     *
     * @param currentItem item
     * @param source source input stream
     * @param verbose verbose mode
     * @return InputStream the resulting input stream, or null if the content is
     *         not a recognized PowerPoint format
     * @throws Exception if error
     */
    @Override
    public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
        throws Exception {

        try {
            String extractedText = null;
            // FIX: removed a dead `new ExtractorFactory()` instantiation; the
            // factory is used via its static createExtractor() only.
            POITextExtractor pptExtractor = ExtractorFactory
                .createExtractor(source);

            // PowerPoint XML files and legacy format PowerPoint files
            // require different classes and APIs for text extraction

            // If this is a PowerPoint XML file, extract accordingly
            if (pptExtractor instanceof XSLFPowerPointExtractor) {
                // The true method arguments indicate that text from
                // the slides and the notes is desired
                extractedText = ((XSLFPowerPointExtractor) pptExtractor)
                    .getText(true, true);
            } else if (pptExtractor instanceof PowerPointExtractor) { // Legacy PowerPoint files
                extractedText = ((PowerPointExtractor) pptExtractor).getText()
                    + " " + ((PowerPointExtractor) pptExtractor).getNotes();
            }

            if (extractedText != null) {
                // if verbose flag is set, print out extracted text
                // to STDOUT
                if (verbose) {
                    System.out.println(extractedText);
                }

                // generate an input stream with the extracted text.
                // FIX: encode explicitly as UTF-8 instead of the platform
                // default charset, consistent with the other text filters.
                byte[] textBytes = extractedText.getBytes(StandardCharsets.UTF_8);
                ByteArrayInputStream bais = new ByteArrayInputStream(textBytes);

                return bais;
            }
        } catch (Exception e) {
            log.error("Error filtering bitstream: " + e.getMessage(), e);
            throw e;
        }

        return null;
    }
}

View File

@@ -0,0 +1,183 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.tika.Tika;
import org.apache.tika.exception.TikaException;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.parser.AutoDetectParser;
import org.apache.tika.sax.BodyContentHandler;
import org.apache.tika.sax.ContentHandlerDecorator;
import org.dspace.content.Item;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.xml.sax.SAXException;
/**
* Text Extraction media filter which uses Apache Tika to extract text from a large number of file formats (including
* all Microsoft formats, PDF, HTML, Text, etc). For a more complete list of file formats supported by Tika see the
* Tika documentation: https://tika.apache.org/2.3.0/formats.html
*/
public class TikaTextExtractionFilter
extends MediaFilter {
private final static Logger log = LogManager.getLogger();
@Override
public String getFilteredName(String oldFilename) {
    // The extracted text keeps the original name with a .txt suffix appended.
    return String.format("%s.txt", oldFilename);
}
@Override
public String getBundleName() {
    // Extracted-text bitstreams are stored in the TEXT bundle.
    return "TEXT";
}
@Override
public String getFormatString() {
    // Bitstream format name registered for plain-text derivatives.
    return "Text";
}
@Override
public String getDescription() {
    // Human-readable description attached to the derived bitstream.
    return "Extracted text";
}
/**
 * Extract text from the source bitstream with Apache Tika, either in memory
 * (default) or via a temporary file when {@code textextractor.use-temp-file}
 * is enabled. Returns null when no text was extracted.
 */
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
    throws Exception {
    ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
    boolean useTemporaryFile = configurationService.getBooleanProperty("textextractor.use-temp-file", false);

    if (useTemporaryFile) {
        // Extract text out of source file using a temp file, returning results as InputStream.
        // NOTE(review): this path does not apply the textextractor.max-chars limit below — confirm intended.
        return extractUsingTempFile(source, verbose);
    }

    // Not using temporary file. We'll use Tika's default in-memory parsing.
    // Get maximum characters to extract. Default is 100,000 chars, which is also Tika's default setting.
    String extractedText;
    int maxChars = configurationService.getIntProperty("textextractor.max-chars", 100000);
    try {
        // Use Tika to extract text from input. Tika will automatically detect the file type.
        Tika tika = new Tika();
        tika.setMaxStringLength(maxChars); // Tell Tika the maximum number of characters to extract
        extractedText = tika.parseToString(source);
    } catch (IOException e) {
        // Report to both STDERR and the log, then rethrow so the filter run records the failure.
        System.err.format("Unable to extract text from bitstream in Item %s%n", currentItem.getID().toString());
        e.printStackTrace();
        log.error("Unable to extract text from bitstream in Item {}", currentItem.getID().toString(), e);
        throw e;
    } catch (OutOfMemoryError oe) {
        // In-memory parsing of very large files can exhaust the heap; suggest the temp-file mode.
        System.err.format("OutOfMemoryError occurred when extracting text from bitstream in Item %s. " +
            "You may wish to enable 'textextractor.use-temp-file'.%n", currentItem.getID().toString());
        oe.printStackTrace();
        log.error("OutOfMemoryError occurred when extracting text from bitstream in Item {}. " +
            "You may wish to enable 'textextractor.use-temp-file'.", currentItem.getID().toString(), oe);
        throw oe;
    }

    if (StringUtils.isNotEmpty(extractedText)) {
        // if verbose flag is set, print out extracted text to STDOUT
        if (verbose) {
            System.out.println("(Verbose mode) Extracted text:");
            System.out.println(extractedText);
        }

        // return the extracted text as a UTF-8 stream.
        return new ByteArrayInputStream(extractedText.getBytes(StandardCharsets.UTF_8));
    }
    return null;
}
/**
* Extracts the text out of a given source InputStream, using a temporary file. This decreases the amount of memory
* necessary for text extraction, but can be slower as it requires writing extracted text to a temporary file.
* @param source source InputStream
* @param verbose verbose mode enabled/disabled
* @return InputStream for temporary file containing extracted text
* @throws IOException
* @throws SAXException
* @throws TikaException
*/
private InputStream extractUsingTempFile(InputStream source, boolean verbose)
throws IOException, TikaException, SAXException {
File tempExtractedTextFile = File.createTempFile("dspacetextextract" + source.hashCode(), ".txt");
if (verbose) {
System.out.println("(Verbose mode) Extracted text was written to temporary file at " +
tempExtractedTextFile.getAbsolutePath());
} else {
tempExtractedTextFile.deleteOnExit();
}
// Open temp file for writing
try (FileWriter writer = new FileWriter(tempExtractedTextFile, StandardCharsets.UTF_8)) {
// Initialize a custom ContentHandlerDecorator which is a BodyContentHandler.
// This mimics the behavior of Tika().parseToString(), which only extracts text from the body of the file.
// This custom Handler writes any extracted text to the temp file.
ContentHandlerDecorator handler = new BodyContentHandler(new ContentHandlerDecorator() {
/**
* Write all extracted characters directly to the temp file.
*/
@Override
public void characters(char[] ch, int start, int length) throws SAXException {
try {
writer.append(new String(ch), start, length);
} catch (IOException e) {
String errorMsg = String.format("Could not append to temporary file at %s " +
"when performing text extraction",
tempExtractedTextFile.getAbsolutePath());
log.error(errorMsg, e);
throw new SAXException(errorMsg, e);
}
}
/**
* Write all ignorable whitespace directly to the temp file.
* This mimics the behaviour of Tika().parseToString() which extracts ignorableWhitespace characters
* (like blank lines, indentations, etc.), so that we get the same extracted text either way.
*/
@Override
public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException {
try {
writer.append(new String(ch), start, length);
} catch (IOException e) {
String errorMsg = String.format("Could not append to temporary file at %s " +
"when performing text extraction",
tempExtractedTextFile.getAbsolutePath());
log.error(errorMsg, e);
throw new SAXException(errorMsg, e);
}
}
});
AutoDetectParser parser = new AutoDetectParser();
Metadata metadata = new Metadata();
// parse our source InputStream using the above custom handler
parser.parse(source, handler, metadata);
}
// At this point, all extracted text is written to our temp file. So, return a FileInputStream for that file
return new FileInputStream(tempExtractedTextFile);
}
}

View File

@@ -631,7 +631,7 @@ public class Packager {
//otherwise, just disseminate a single object to a single package file
dip.disseminate(context, dso, pkgParams, pkgFile);
if (pkgFile != null && pkgFile.exists()) {
if (pkgFile.exists()) {
System.out.println("\nCREATED package file: " + pkgFile.getCanonicalPath());
}
}

View File

@@ -0,0 +1,46 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.requestitem;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.springframework.lang.NonNull;
/**
* Derive request recipients from groups of the Collection which owns an Item.
* The list will include all members of the administrators group. If the
* resulting list is empty, delegates to {@link RequestItemHelpdeskStrategy}.
*
* @author Mark H. Wood <mwood@iupui.edu>
*/
public class CollectionAdministratorsRequestItemStrategy
        extends RequestItemHelpdeskStrategy {
    /**
     * Build the recipient list from the administrators of the Item's owning
     * Collection. Falls back to the helpdesk strategy when no administrators
     * can be found.
     *
     * @param context current DSpace session context
     * @param item    the requested Item
     * @return all members of the owning Collection's administrators group, or
     *         the helpdesk strategy's result if that list would be empty
     * @throws SQLException passed through from group membership lookups
     */
    @Override
    @NonNull
    public List<RequestItemAuthor> getRequestItemAuthor(Context context,
            Item item)
            throws SQLException {
        Collection collection = item.getOwningCollection();
        // Guard: in-progress items (workspace/workflow) have no owning
        // Collection yet, and a Collection may have no administrators group.
        // In either case, delegate to the helpdesk strategy.
        if (collection == null || collection.getAdministrators() == null) {
            return super.getRequestItemAuthor(context, item);
        }
        List<RequestItemAuthor> recipients = new ArrayList<>();
        for (EPerson admin : collection.getAdministrators().getMembers()) {
            recipients.add(new RequestItemAuthor(admin));
        }
        if (recipients.isEmpty()) {
            return super.getRequestItemAuthor(context, item);
        } else {
            return recipients;
        }
    }
}

View File

@@ -0,0 +1,61 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.requestitem;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.springframework.lang.NonNull;
import org.springframework.util.Assert;
/**
* Assemble a list of recipients from the results of other strategies.
* The list of strategy classes is injected as the constructor argument
* {@code strategies}.
* If the strategy list is not configured, returns an empty List.
*
* @author Mark H. Wood <mwood@iupui.edu>
*/
public class CombiningRequestItemStrategy
        implements RequestItemAuthorExtractor {
    /** Delegate strategies, consulted in list order. */
    private final List<RequestItemAuthorExtractor> strategies;

    /**
     * Build a combining strategy from a list of delegates.
     *
     * @param strategies the author extraction strategies to combine; required.
     */
    public CombiningRequestItemStrategy(@NonNull List<RequestItemAuthorExtractor> strategies) {
        Assert.notNull(strategies, "Strategy list may not be null");
        this.strategies = strategies;
    }

    /**
     * Do not call.
     * @throws IllegalArgumentException always
     */
    private CombiningRequestItemStrategy() {
        throw new IllegalArgumentException();
    }

    /**
     * Collect recipients by querying every configured delegate strategy and
     * concatenating their results (duplicates are not removed).
     *
     * @param context current DSpace session context
     * @param item    the requested Item
     * @return the combined recipient list; empty if no delegate produced any
     * @throws SQLException passed through from any delegate
     */
    @Override
    @NonNull
    public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
            throws SQLException {
        List<RequestItemAuthor> combined = new ArrayList<>();
        Iterator<RequestItemAuthorExtractor> delegates = strategies.iterator();
        while (delegates.hasNext()) {
            combined.addAll(delegates.next().getRequestItemAuthor(context, item));
        }
        return combined;
    }
}

View File

@@ -27,7 +27,7 @@ import org.dspace.core.Context;
import org.dspace.core.ReloadableEntity;
/**
* Object representing an Item Request
* Object representing an Item Request.
*/
@Entity
@Table(name = "requestitem")
@@ -94,6 +94,9 @@ public class RequestItem implements ReloadableEntity<Integer> {
this.allfiles = allfiles;
}
/**
* @return {@code true} if all of the Item's files are requested.
*/
public boolean isAllfiles() {
return allfiles;
}
@@ -102,6 +105,9 @@ public class RequestItem implements ReloadableEntity<Integer> {
this.reqMessage = reqMessage;
}
/**
* @return a message from the requester.
*/
public String getReqMessage() {
return reqMessage;
}
@@ -110,6 +116,9 @@ public class RequestItem implements ReloadableEntity<Integer> {
this.reqName = reqName;
}
/**
* @return Human-readable name of the user requesting access.
*/
public String getReqName() {
return reqName;
}
@@ -118,6 +127,9 @@ public class RequestItem implements ReloadableEntity<Integer> {
this.reqEmail = reqEmail;
}
/**
* @return address of the user requesting access.
*/
public String getReqEmail() {
return reqEmail;
}
@@ -126,6 +138,9 @@ public class RequestItem implements ReloadableEntity<Integer> {
this.token = token;
}
/**
* @return a unique request identifier which can be emailed.
*/
public String getToken() {
return token;
}

View File

@@ -11,20 +11,31 @@ import org.dspace.eperson.EPerson;
/**
* Simple DTO to transfer data about the corresponding author for the Request
* Copy feature
* Copy feature.
*
* @author Andrea Bollini
*/
public class RequestItemAuthor {
private String fullName;
private String email;
private final String fullName;
private final String email;
/**
* Construct an author record from given data.
*
* @param fullName the author's full name.
* @param email the author's email address.
*/
public RequestItemAuthor(String fullName, String email) {
super();
this.fullName = fullName;
this.email = email;
}
/**
* Construct an author from an EPerson's metadata.
*
* @param ePerson the EPerson.
*/
public RequestItemAuthor(EPerson ePerson) {
super();
this.fullName = ePerson.getFullName();

View File

@@ -8,26 +8,28 @@
package org.dspace.app.requestitem;
import java.sql.SQLException;
import java.util.List;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.springframework.lang.NonNull;
/**
* Interface to abstract the strategy for select the author to contact for
* request copy
* Interface to abstract the strategy for selecting the author to contact for
* request copy.
*
* @author Andrea Bollini
*/
public interface RequestItemAuthorExtractor {
/**
* Retrieve the auhtor to contact for a request copy of the give item.
* Retrieve the author to contact for requesting a copy of the given item.
*
* @param context DSpace context object
* @param item item to request
* @return An object containing name an email address to send the request to
* or null if no valid email address was found.
* @return Names and email addresses to send the request to.
* @throws SQLException if database error
*/
public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException;
@NonNull
public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
throws SQLException;
}

View File

@@ -72,28 +72,48 @@ public class RequestItemEmailNotifier {
static public void sendRequest(Context context, RequestItem ri, String responseLink)
throws IOException, SQLException {
// Who is making this request?
RequestItemAuthor author = requestItemAuthorExtractor
List<RequestItemAuthor> authors = requestItemAuthorExtractor
.getRequestItemAuthor(context, ri.getItem());
String authorEmail = author.getEmail();
String authorName = author.getFullName();
// Build an email to the approver.
Email email = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(),
"request_item.author"));
email.addRecipient(authorEmail);
for (RequestItemAuthor author : authors) {
email.addRecipient(author.getEmail());
}
email.setReplyTo(ri.getReqEmail()); // Requester's address
email.addArgument(ri.getReqName()); // {0} Requester's name
email.addArgument(ri.getReqEmail()); // {1} Requester's address
email.addArgument(ri.isAllfiles() // {2} All bitstreams or just one?
? I18nUtil.getMessage("itemRequest.all") : ri.getBitstream().getName());
email.addArgument(handleService.getCanonicalForm(ri.getItem().getHandle()));
email.addArgument(handleService.getCanonicalForm(ri.getItem().getHandle())); // {3}
email.addArgument(ri.getItem().getName()); // {4} requested item's title
email.addArgument(ri.getReqMessage()); // {5} message from requester
email.addArgument(responseLink); // {6} Link back to DSpace for action
email.addArgument(authorName); // {7} corresponding author name
email.addArgument(authorEmail); // {8} corresponding author email
email.addArgument(configurationService.getProperty("dspace.name"));
email.addArgument(configurationService.getProperty("mail.helpdesk"));
StringBuilder names = new StringBuilder();
StringBuilder addresses = new StringBuilder();
for (RequestItemAuthor author : authors) {
if (names.length() > 0) {
names.append("; ");
addresses.append("; ");
}
names.append(author.getFullName());
addresses.append(author.getEmail());
}
email.addArgument(names.toString()); // {7} corresponding author name
email.addArgument(addresses.toString()); // {8} corresponding author email
email.addArgument(configurationService.getProperty("dspace.name")); // {9}
email.addArgument(configurationService.getProperty("mail.helpdesk")); // {10}
// Send the email.
try {
@@ -134,9 +154,9 @@ public class RequestItemEmailNotifier {
email.setContent("body", message);
email.setSubject(subject);
email.addRecipient(ri.getReqEmail());
if (ri.isAccept_request()) {
// Attach bitstreams.
try {
// Attach bitstreams.
try {
if (ri.isAccept_request()) {
if (ri.isAllfiles()) {
Item item = ri.getItem();
List<Bundle> bundles = item.getBundles("ORIGINAL");
@@ -159,11 +179,19 @@ public class RequestItemEmailNotifier {
bitstream.getFormat(context).getMIMEType());
}
email.send();
} catch (MessagingException | IOException | SQLException | AuthorizeException e) {
LOG.warn(LogHelper.getHeader(context,
"error_mailing_requestItem", e.getMessage()));
throw new IOException("Reply not sent: " + e.getMessage());
} else {
boolean sendRejectEmail = configurationService
.getBooleanProperty("request.item.reject.email", true);
// Not all sites want the "refusal" to be sent back to the requester via
// email. However, by default, the rejection email is sent back.
if (sendRejectEmail) {
email.send();
}
}
} catch (MessagingException | IOException | SQLException | AuthorizeException e) {
LOG.warn(LogHelper.getHeader(context,
"error_mailing_requestItem", e.getMessage()));
throw new IOException("Reply not sent: " + e.getMessage());
}
LOG.info(LogHelper.getHeader(context,
"sent_attach_requestItem", "token={}"), ri.getToken());

View File

@@ -8,6 +8,8 @@
package org.dspace.app.requestitem;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Item;
@@ -16,11 +18,11 @@ import org.dspace.core.I18nUtil;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.lang.NonNull;
/**
* RequestItem strategy to allow DSpace support team's helpdesk to receive requestItem request
* RequestItem strategy to allow DSpace support team's helpdesk to receive requestItem request.
* With this enabled, then the Item author/submitter doesn't receive the request, but the helpdesk instead does.
*
* Failover to the RequestItemSubmitterStrategy, which means the submitter would get the request if there is no
@@ -33,19 +35,24 @@ public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
@Autowired(required = true)
protected EPersonService ePersonService;
@Autowired(required = true)
private ConfigurationService configuration;
public RequestItemHelpdeskStrategy() {
}
@Override
public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException {
ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
boolean helpdeskOverridesSubmitter = configurationService
@NonNull
public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
throws SQLException {
boolean helpdeskOverridesSubmitter = configuration
.getBooleanProperty("request.item.helpdesk.override", false);
String helpDeskEmail = configurationService.getProperty("mail.helpdesk");
String helpDeskEmail = configuration.getProperty("mail.helpdesk");
if (helpdeskOverridesSubmitter && StringUtils.isNotBlank(helpDeskEmail)) {
return getHelpDeskPerson(context, helpDeskEmail);
List<RequestItemAuthor> authors = new ArrayList<>(1);
authors.add(getHelpDeskPerson(context, helpDeskEmail));
return authors;
} else {
//Fallback to default logic (author of Item) if helpdesk isn't fully enabled or setup
return super.getRequestItemAuthor(context, item);

View File

@@ -8,6 +8,8 @@
package org.dspace.app.requestitem;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
@@ -16,12 +18,13 @@ import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.lang.NonNull;
/**
* Try to look to an item metadata for the corresponding author name and email.
* Failover to the RequestItemSubmitterStrategy
* Failover to the RequestItemSubmitterStrategy.
*
* @author Andrea Bollini
*/
@@ -30,6 +33,9 @@ public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy {
protected String emailMetadata;
protected String fullNameMetadata;
@Autowired(required = true)
protected ConfigurationService configurationService;
@Autowired(required = true)
protected ItemService itemService;
@@ -37,59 +43,72 @@ public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy {
}
@Override
public RequestItemAuthor getRequestItemAuthor(Context context, Item item)
@NonNull
public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
throws SQLException {
RequestItemAuthor author = null;
List<RequestItemAuthor> authors;
if (emailMetadata != null) {
List<MetadataValue> vals = itemService.getMetadataByMetadataString(item, emailMetadata);
if (vals.size() > 0) {
String email = vals.iterator().next().getValue();
String fullname = null;
if (fullNameMetadata != null) {
List<MetadataValue> nameVals = itemService.getMetadataByMetadataString(item, fullNameMetadata);
if (nameVals.size() > 0) {
fullname = nameVals.iterator().next().getValue();
List<MetadataValue> nameVals;
if (null != fullNameMetadata) {
nameVals = itemService.getMetadataByMetadataString(item, fullNameMetadata);
} else {
nameVals = Collections.EMPTY_LIST;
}
boolean useNames = vals.size() == nameVals.size();
if (!vals.isEmpty()) {
authors = new ArrayList<>(vals.size());
for (int authorIndex = 0; authorIndex < vals.size(); authorIndex++) {
String email = vals.get(authorIndex).getValue();
String fullname = null;
if (useNames) {
fullname = nameVals.get(authorIndex).getValue();
}
if (StringUtils.isBlank(fullname)) {
fullname = I18nUtil.getMessage(
"org.dspace.app.requestitem.RequestItemMetadataStrategy.unnamed",
context);
}
RequestItemAuthor author = new RequestItemAuthor(
fullname, email);
authors.add(author);
}
if (StringUtils.isBlank(fullname)) {
fullname = I18nUtil
.getMessage(
"org.dspace.app.requestitem.RequestItemMetadataStrategy.unnamed",
context);
}
author = new RequestItemAuthor(fullname, email);
return author;
return authors;
} else {
return Collections.EMPTY_LIST;
}
} else {
// Uses the basic strategy to look for the original submitter
author = super.getRequestItemAuthor(context, item);
// Is the author or his email null, so get the help desk or admin name and email
if (null == author || null == author.getEmail()) {
String email = null;
String name = null;
authors = super.getRequestItemAuthor(context, item);
// Remove from the list authors that do not have email addresses.
for (RequestItemAuthor author : authors) {
if (null == author.getEmail()) {
authors.remove(author);
}
}
if (authors.isEmpty()) { // No author email addresses! Fall back
//First get help desk name and email
email = DSpaceServicesFactory.getInstance()
.getConfigurationService().getProperty("mail.helpdesk");
name = DSpaceServicesFactory.getInstance()
.getConfigurationService().getProperty("mail.helpdesk.name");
String email = configurationService.getProperty("mail.helpdesk");
String name = configurationService.getProperty("mail.helpdesk.name");
// If help desk mail is null get the mail and name of admin
if (email == null) {
email = DSpaceServicesFactory.getInstance()
.getConfigurationService().getProperty("mail.admin");
name = DSpaceServicesFactory.getInstance()
.getConfigurationService().getProperty("mail.admin.name");
email = configurationService.getProperty("mail.admin");
name = configurationService.getProperty("mail.admin.name");
}
author = new RequestItemAuthor(name, email);
authors.add(new RequestItemAuthor(name, email));
}
return authors;
}
return author;
}
public void setEmailMetadata(String emailMetadata) {
public void setEmailMetadata(@NonNull String emailMetadata) {
this.emailMetadata = emailMetadata;
}
public void setFullNameMetadata(String fullNameMetadata) {
public void setFullNameMetadata(@NonNull String fullNameMetadata) {
this.fullNameMetadata = fullNameMetadata;
}

View File

@@ -8,10 +8,13 @@
package org.dspace.app.requestitem;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.springframework.lang.NonNull;
/**
* Basic strategy that looks to the original submitter.
@@ -24,21 +27,23 @@ public class RequestItemSubmitterStrategy implements RequestItemAuthorExtractor
}
/**
* Returns the submitter of an Item as RequestItemAuthor or null if the
* Submitter is deleted.
* Returns the submitter of an Item as RequestItemAuthor or an empty List if
* the Submitter is deleted.
*
* @return The submitter of the item or null if the submitter is deleted
* @return The submitter of the item or empty List if the submitter is deleted
* @throws SQLException if database error
*/
@Override
public RequestItemAuthor getRequestItemAuthor(Context context, Item item)
@NonNull
public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
throws SQLException {
EPerson submitter = item.getSubmitter();
RequestItemAuthor author = null;
List<RequestItemAuthor> authors = new ArrayList<>(1);
if (null != submitter) {
author = new RequestItemAuthor(
submitter.getFullName(), submitter.getEmail());
RequestItemAuthor author = new RequestItemAuthor(
submitter.getFullName(), submitter.getEmail());
authors.add(author);
}
return author;
return authors;
}
}

View File

@@ -31,6 +31,7 @@ import org.dspace.app.sherpa.v2.SHERPAResponse;
import org.dspace.app.sherpa.v2.SHERPAUtils;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.Cacheable;
/**
* SHERPAService is responsible for making the HTTP call to the SHERPA v2 API
@@ -43,6 +44,7 @@ import org.springframework.beans.factory.annotation.Autowired;
* @author Kim Shepherd
*/
public class SHERPAService {
private CloseableHttpClient client = null;
private int maxNumberOfTries;
@@ -91,6 +93,7 @@ public class SHERPAService {
* @param query ISSN string to pass in an "issn equals" API query
* @return SHERPAResponse containing an error or journal policies
*/
@Cacheable(key = "#query", cacheNames = "sherpa.searchByJournalISSN")
public SHERPAResponse searchByJournalISSN(String query) {
return performRequest("publication", "issn", "equals", query, 0, 1);
}
@@ -413,4 +416,5 @@ public class SHERPAService {
public void setTimeout(int timeout) {
this.timeout = timeout;
}
}
}

View File

@@ -0,0 +1,71 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.sherpa.cache;
import java.util.Objects;
import java.util.Set;

import org.dspace.app.sherpa.submit.SHERPASubmitService;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.springframework.cache.Cache;
import org.springframework.cache.CacheManager;
/**
* This service is responsible to deal with the SherpaService cache.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
public class SherpaCacheEvictService {
    // The cache that is managed by this service.
    static final String CACHE_NAME = "sherpa.searchByJournalISSN";

    private CacheManager cacheManager;
    private SHERPASubmitService sherpaSubmitService;

    /**
     * Remove immediately from the cache all the responses that are related to a
     * specific item, extracting the ISSNs from the item.
     *
     * @param context The DSpace context
     * @param item an Item whose ISSNs identify the cache entries to evict
     */
    public void evictCacheValues(Context context, Item item) {
        // Resolve the cache once, rather than once per ISSN; fail fast if the
        // cache is not configured (same requireNonNull behavior as before).
        Cache cache = Objects.requireNonNull(cacheManager.getCache(CACHE_NAME));
        Set<String> issns = sherpaSubmitService.getISSNs(context, item);
        for (String issn : issns) {
            cache.evictIfPresent(issn);
        }
    }

    /**
     * Invalidate immediately the whole Sherpa cache.
     */
    public void evictAllCacheValues() {
        Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).invalidate();
    }

    /**
     * Set the reference to the cacheManager.
     *
     * @param cacheManager the Spring cache manager holding the Sherpa cache
     */
    public void setCacheManager(CacheManager cacheManager) {
        this.cacheManager = cacheManager;
    }

    /**
     * Set the reference to the SherpaSubmitService.
     *
     * @param sherpaSubmitService service used to extract ISSNs from an Item
     */
    public void setSherpaSubmitService(SHERPASubmitService sherpaSubmitService) {
        this.sherpaSubmitService = sherpaSubmitService;
    }
}

View File

@@ -0,0 +1,34 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.sherpa.cache;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.ehcache.event.CacheEvent;
import org.ehcache.event.CacheEventListener;
/**
 * This is an EHCache listener responsible for logging sherpa cache events. It is
 * bound to the sherpa cache via the dspace/config/ehcache.xml file. We need a
 * dedicated Logger for each cache, as the CacheEvent doesn't include details
 * about where the event occurs
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*
*/
public class SherpaCacheLogger implements CacheEventListener<Object, Object> {

    private static final Logger LOG = LogManager.getLogger(SherpaCacheLogger.class);

    /**
     * Log each cache event (creation, eviction, expiry, ...) at DEBUG level.
     *
     * @param cacheEvent the event reported by EHCache
     */
    @Override
    public void onEvent(CacheEvent<?, ?> cacheEvent) {
        Object eventType = cacheEvent.getType();
        Object eventKey = cacheEvent.getKey();
        LOG.debug("Sherpa Cache Event Type: {} | Key: {} ", eventType, eventKey);
    }
}

View File

@@ -9,7 +9,6 @@ package org.dspace.app.sherpa.submit;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
@@ -63,19 +62,19 @@ public class SHERPASubmitService {
* issnItemExtractor(s) in the SHERPA spring configuration.
* The ISSNs are not validated with a regular expression or other rules - any values
* extracted will be included in API queries.
* Return the first not empty response from Sherpa
* @see "dspace-dspace-addon-sherpa-configuration-services.xml"
* @param context DSpace context
* @param item DSpace item containing ISSNs to be checked
* @return SHERPA v2 API response (policy data)
*/
public List<SHERPAResponse> searchRelatedJournals(Context context, Item item) {
public SHERPAResponse searchRelatedJournals(Context context, Item item) {
Set<String> issns = getISSNs(context, item);
if (issns == null || issns.size() == 0) {
return null;
} else {
// SHERPA v2 API no longer supports "OR'd" ISSN search, perform individual searches instead
Iterator<String> issnIterator = issns.iterator();
List<SHERPAResponse> responses = new LinkedList<>();
while (issnIterator.hasNext()) {
String issn = issnIterator.next();
SHERPAResponse response = sherpaService.searchByJournalISSN(issn);
@@ -83,14 +82,13 @@ public class SHERPASubmitService {
// Continue with loop
log.warn("Failed to look up SHERPA ROMeO result for ISSN: " + issn
+ ": " + response.getMessage());
return response;
} else if (!response.getJournals().isEmpty()) {
// return this response, if it is not empty
return response;
}
// Store this response, even if it has an error (useful for UI reporting)
responses.add(response);
}
if (responses.isEmpty()) {
responses.add(new SHERPAResponse("SHERPA ROMeO lookup failed"));
}
return responses;
return new SHERPAResponse();
}
}

View File

@@ -0,0 +1,45 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.sherpa.v2;
import java.io.Serializable;
/**
* Model class for the Embargo of SHERPAv2 API (JSON)
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
public class SHERPAEmbargo implements Serializable {

    private static final long serialVersionUID = 6140668058547523656L;

    // Length of the embargo period, expressed in "units".
    private int amount;
    // Time unit for the embargo length (as returned by the SHERPA API).
    private String units;

    /**
     * Construct an embargo period.
     *
     * @param amount length of the embargo, in {@code units}
     * @param units  time unit of the embargo length
     */
    public SHERPAEmbargo(int amount, String units) {
        this.amount = amount;
        this.units = units;
    }

    /** @return length of the embargo, in {@link #getUnits()} */
    public int getAmount() {
        return amount;
    }

    /** @param amount length of the embargo, in {@code units} */
    public void setAmount(int amount) {
        this.amount = amount;
    }

    /** @return time unit of the embargo length */
    public String getUnits() {
        return units;
    }

    /** @param units time unit of the embargo length */
    public void setUnits(String units) {
        this.units = units;
    }
}

View File

@@ -7,6 +7,7 @@
*/
package org.dspace.app.sherpa.v2;
import java.io.Serializable;
import java.util.List;
/**
@@ -21,7 +22,7 @@ import java.util.List;
*
* @author Kim Shepherd
*/
public class SHERPAJournal {
public class SHERPAJournal implements Serializable {
private List<String> titles;
private String url;

View File

@@ -7,6 +7,7 @@
*/
package org.dspace.app.sherpa.v2;
import java.io.Serializable;
import java.util.List;
/**
@@ -28,7 +29,9 @@ import java.util.List;
*
* @see SHERPAPublisherPolicy
*/
public class SHERPAPermittedVersion {
public class SHERPAPermittedVersion implements Serializable {
private static final long serialVersionUID = 4992181606327727442L;
// Version (submitted, accepted, published)
private String articleVersion;
@@ -47,11 +50,6 @@ public class SHERPAPermittedVersion {
// Embargo
private SHERPAEmbargo embargo;
protected static class SHERPAEmbargo {
String units;
int amount;
}
public String getArticleVersion() {
return articleVersion;
}

View File

@@ -7,6 +7,8 @@
*/
package org.dspace.app.sherpa.v2;
import java.io.Serializable;
/**
* Plain java representation of a SHERPA Publisher object, based on SHERPA API v2 responses.
*
@@ -18,7 +20,7 @@ package org.dspace.app.sherpa.v2;
* @see SHERPAJournal
* @see SHERPAPublisherResponse
*/
public class SHERPAPublisher {
public class SHERPAPublisher implements Serializable {
private String name = null;
private String relationshipType;
private String country;

View File

@@ -7,6 +7,7 @@
*/
package org.dspace.app.sherpa.v2;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
@@ -22,7 +23,7 @@ import java.util.Map;
* @see SHERPAJournal
* @see SHERPAPermittedVersion
*/
public class SHERPAPublisherPolicy {
public class SHERPAPublisherPolicy implements Serializable {
private int id;
private boolean openAccessPermitted;

View File

@@ -10,12 +10,15 @@ package org.dspace.app.sherpa.v2;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import com.fasterxml.jackson.annotation.JsonIgnore;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.json.JSONArray;
@@ -33,7 +36,10 @@ import org.json.JSONTokener;
* @author Kim Shepherd
*
*/
public class SHERPAResponse {
public class SHERPAResponse implements Serializable {
private static final long serialVersionUID = 2732963970169240597L;
// Is this response to be treated as an error?
private boolean error;
@@ -52,6 +58,9 @@ public class SHERPAResponse {
// SHERPA URI (the human page version of this API response)
private String uri;
@JsonIgnore
private Date retrievalTime = new Date();
// Format enum - currently only JSON is supported
public enum SHERPAFormat {
JSON, XML
@@ -71,6 +80,11 @@ public class SHERPAResponse {
}
}
/**
* Create an empty SHERPAResponse representation
*/
public SHERPAResponse() {}
/**
* Parse the SHERPA v2 API JSON and construct Romeo policy data for display
* This method does not return a value, but rather populates the metadata and journals objects
@@ -479,6 +493,12 @@ public class SHERPAResponse {
}
permittedVersion.setLicenses(sherpaLicenses);
if (permitted.has("embargo")) {
JSONObject embargo = permitted.getJSONObject("embargo");
SHERPAEmbargo SHERPAEmbargo = new SHERPAEmbargo(embargo.getInt("amount"), embargo.getString("units"));
permittedVersion.setEmbargo(SHERPAEmbargo);
}
return permittedVersion;
}
@@ -542,4 +562,8 @@ public class SHERPAResponse {
public SHERPASystemMetadata getMetadata() {
return metadata;
}
public Date getRetrievalTime() {
return retrievalTime;
}
}

View File

@@ -7,6 +7,8 @@
*/
package org.dspace.app.sherpa.v2;
import java.io.Serializable;
/**
* Plain java representation of a SHERPA System Metadata object, based on SHERPA API v2 responses.
*
@@ -18,7 +20,7 @@ package org.dspace.app.sherpa.v2;
*
* @author Kim Shepherd
*/
public class SHERPASystemMetadata {
public class SHERPASystemMetadata implements Serializable {
private int id;
private String uri;

View File

@@ -0,0 +1,175 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.solrdatabaseresync;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD_PREDB;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import org.apache.commons.cli.ParseException;
import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.dspace.core.Context;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.IndexingService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.SolrSearchCore;
import org.dspace.discovery.indexobject.IndexableItem;
import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.util.SolrUtils;
import org.dspace.utils.DSpace;
/**
* {@link DSpaceRunnable} implementation to update solr items with "predb" status to either:
* - Delete them from solr if they're not present in the database
* - Remove their status if they're present in the database
*/
public class SolrDatabaseResyncCli extends DSpaceRunnable<SolrDatabaseResyncCliScriptConfiguration> {
    /* Log4j logger */
    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SolrDatabaseResyncCli.class);

    /**
     * Configuration property holding the grace period in milliseconds: "predb"
     * documents indexed more recently than this are not touched
     * (presumably to avoid racing a database commit that is still in flight
     * — confirm against the indexing workflow).
     */
    public static final String TIME_UNTIL_REINDEX_PROPERTY = "solr-database-resync.time-until-reindex";

    private IndexingService indexingService;
    private SolrSearchCore solrSearchCore;
    private IndexObjectFactoryFactory indexObjectServiceFactory;
    private ConfigurationService configurationService;

    // Grace period in ms, read from TIME_UNTIL_REINDEX_PROPERTY during internalRun()
    private int timeUntilReindex = 0;

    // Upper bound (Solr-formatted date string) on the lastIndexed date of documents to process
    private String maxTime;

    @Override
    public SolrDatabaseResyncCliScriptConfiguration getScriptConfiguration() {
        return new DSpace().getServiceManager()
                .getServiceByName("solr-database-resync", SolrDatabaseResyncCliScriptConfiguration.class);
    }

    /**
     * Entry point for scheduled (non command line) execution: set up the
     * required services, then run the resync once.
     *
     * @throws Exception if setup or the resync run fails
     */
    public static void runScheduled() throws Exception {
        SolrDatabaseResyncCli script = new SolrDatabaseResyncCli();
        script.setup();
        script.internalRun();
    }

    @Override
    public void setup() throws ParseException {
        indexingService = DSpaceServicesFactory.getInstance().getServiceManager()
                .getServiceByName(IndexingService.class.getName(), IndexingService.class);
        solrSearchCore = DSpaceServicesFactory.getInstance().getServiceManager()
                .getServicesByType(SolrSearchCore.class).get(0);
        indexObjectServiceFactory = IndexObjectFactoryFactory.getInstance();
        configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
    }

    @Override
    public void internalRun() throws Exception {
        logInfoAndOut("Starting Item resync of Solr and Database...");

        timeUntilReindex = getTimeUntilReindex();
        maxTime = getMaxTime();

        Context context = new Context();
        try {
            // The script must see every item, including restricted ones.
            context.turnOffAuthorisationSystem();
            performStatusUpdate(context);
        } finally {
            context.restoreAuthSystemState();
            context.complete();
        }
    }

    /**
     * Query Solr for all item documents still flagged "predb" whose lastIndexed
     * date is older than the grace period, then for each one either clear the
     * flag (item exists in the database) or remove the document (it does not).
     */
    private void performStatusUpdate(Context context) throws SearchServiceException, SolrServerException, IOException {
        SolrQuery solrQuery = new SolrQuery();
        solrQuery.setQuery(STATUS_FIELD + ":" + STATUS_FIELD_PREDB);
        solrQuery.addFilterQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":" + IndexableItem.TYPE);
        String dateRangeFilter = SearchUtils.LAST_INDEXED_FIELD + ":[* TO " + maxTime + "]";
        logDebugAndOut("Date range filter used; " + dateRangeFilter);
        solrQuery.addFilterQuery(dateRangeFilter);
        solrQuery.addField(SearchUtils.RESOURCE_ID_FIELD);
        solrQuery.addField(SearchUtils.RESOURCE_UNIQUE_ID);

        // BUG FIX: Solr returns only its default page (10 rows) unless a row count
        // is set explicitly, which silently skipped all but the first 10 stale
        // documents per run. Ask for the total match count first (rows=0 is cheap),
        // then fetch every matching document.
        solrQuery.setRows(0);
        QueryResponse response = solrSearchCore.getSolr().query(solrQuery, solrSearchCore.REQUEST_METHOD);
        long numFound = response != null ? response.getResults().getNumFound() : 0;
        if (numFound > 0) {
            solrQuery.setRows((int) Math.min(numFound, Integer.MAX_VALUE));
            response = solrSearchCore.getSolr().query(solrQuery, solrSearchCore.REQUEST_METHOD);
        }

        if (response != null) {
            logInfoAndOut(response.getResults().size() + " items found to process");

            for (SolrDocument doc : response.getResults()) {
                String uuid = (String) doc.getFirstValue(SearchUtils.RESOURCE_ID_FIELD);
                String uniqueId = (String) doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID);
                logDebugAndOut("Processing item with UUID: " + uuid);

                Optional<IndexableObject> indexableObject = Optional.empty();
                try {
                    indexableObject = indexObjectServiceFactory
                            .getIndexableObjectFactory(uniqueId).findIndexableObject(context, uuid);
                } catch (SQLException e) {
                    // DB lookup failed: leave the Optional empty so the stale Solr
                    // document is removed below.
                    log.warn("An exception occurred when attempting to retrieve item with UUID \"" + uuid +
                            "\" from the database, removing related solr document", e);
                }

                try {
                    if (indexableObject.isPresent()) {
                        logDebugAndOut("Item exists in DB, updating solr document");
                        updateItem(context, indexableObject.get());
                    } else {
                        logDebugAndOut("Item doesn't exist in DB, removing solr document");
                        removeItem(context, uniqueId);
                    }
                } catch (SQLException | IOException e) {
                    // Keep processing the remaining documents even if one fails.
                    log.error(e.getMessage(), e);
                }
            }
        }

        indexingService.commit();
    }

    /**
     * Atomically remove the "predb" status from an existing item's Solr document.
     */
    private void updateItem(Context context, IndexableObject indexableObject) throws SolrServerException, IOException {
        Map<String, Object> fieldModifier = new HashMap<>(1);
        fieldModifier.put("remove", STATUS_FIELD_PREDB);
        indexingService.atomicUpdate(context, indexableObject.getUniqueIndexID(), STATUS_FIELD, fieldModifier);
    }

    /**
     * Remove the Solr document of an item that no longer exists in the database.
     */
    private void removeItem(Context context, String uniqueId) throws IOException, SQLException {
        indexingService.unIndexContent(context, uniqueId);
    }

    /**
     * @return the cut-off date (now minus the grace period), formatted for Solr queries
     */
    private String getMaxTime() {
        Calendar cal = Calendar.getInstance();
        if (timeUntilReindex > 0) {
            cal.add(Calendar.MILLISECOND, -timeUntilReindex);
        }
        return SolrUtils.getDateFormatter().format(cal.getTime());
    }

    /**
     * @return the configured grace period in milliseconds (0 when unset)
     */
    private int getTimeUntilReindex() {
        return configurationService.getIntProperty(TIME_UNTIL_REINDEX_PROPERTY, 0);
    }

    /** Log at INFO level and echo to standard output (the script is run interactively). */
    private void logInfoAndOut(String message) {
        log.info(message);
        System.out.println(message);
    }

    /** Log at DEBUG level and echo to standard output. */
    private void logDebugAndOut(String message) {
        log.debug(message);
        System.out.println(message);
    }
}

View File

@@ -0,0 +1,42 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.solrdatabaseresync;
import org.apache.commons.cli.Options;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
* The {@link ScriptConfiguration} for the {@link SolrDatabaseResyncCli} script.
*/
public class SolrDatabaseResyncCliScriptConfiguration extends ScriptConfiguration<SolrDatabaseResyncCli> {

    /** The runnable class described by this configuration. */
    private Class<SolrDatabaseResyncCli> dspaceRunnableClass;

    @Override
    public Class<SolrDatabaseResyncCli> getDspaceRunnableClass() {
        return this.dspaceRunnableClass;
    }

    @Override
    public void setDspaceRunnableClass(Class<SolrDatabaseResyncCli> runnableClass) {
        this.dspaceRunnableClass = runnableClass;
    }

    /**
     * Anyone may execute this script.
     *
     * @param context the DSpace context (unused)
     * @return always {@code true}
     */
    @Override
    public boolean isAllowedToExecute(Context context) {
        return true;
    }

    /**
     * Lazily build the option set. The script accepts no command line
     * arguments, so the set is empty.
     *
     * @return the (empty) options for this script
     */
    @Override
    public Options getOptions() {
        if (options == null) {
            options = new Options();
        }
        return options;
    }
}

View File

@@ -29,6 +29,10 @@ import java.util.TimeZone;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.lang3.StringUtils;
import org.dspace.core.Context;
import org.dspace.core.LogHelper;
@@ -44,6 +48,7 @@ import org.dspace.services.factory.DSpaceServicesFactory;
* files. Most input can be configured; use the -help flag for a full list
* of usage information.
*
* <p>
* The output of this file is plain text and forms an "aggregation" file which
* can then be used for display purposes using the related ReportGenerator
* class.
@@ -167,7 +172,7 @@ public class LogAnalyser {
/**
* the average number of views per item
*/
private static int views = 0;
private static long views = 0;
///////////////////////
// regular expressions
@@ -236,12 +241,12 @@ public class LogAnalyser {
/**
* pattern to match commented out lines from the config file
*/
private static final Pattern comment = Pattern.compile("^#");
private static final Pattern COMMENT = Pattern.compile("^#");
/**
* pattern to match genuine lines from the config file
*/
private static final Pattern real = Pattern.compile("^(.+)=(.+)");
private static final Pattern REAL = Pattern.compile("^(.+)=(.+)");
/**
* pattern to match all search types
@@ -337,44 +342,73 @@ public class LogAnalyser {
Date myEndDate = null;
boolean myLookUp = false;
// read in our command line options
for (int i = 0; i < argv.length; i++) {
if (argv[i].equals("-log")) {
myLogDir = argv[i + 1];
}
// Define command line options.
Options options = new Options();
Option option;
if (argv[i].equals("-file")) {
myFileTemplate = argv[i + 1];
}
option = Option.builder().longOpt("log").hasArg().build();
options.addOption(option);
if (argv[i].equals("-cfg")) {
myConfigFile = argv[i + 1];
}
option = Option.builder().longOpt("file").hasArg().build();
options.addOption(option);
if (argv[i].equals("-out")) {
myOutFile = argv[i + 1];
}
option = Option.builder().longOpt("cfg").hasArg().build();
options.addOption(option);
if (argv[i].equals("-help")) {
LogAnalyser.usage();
System.exit(0);
}
option = Option.builder().longOpt("out").hasArg().build();
options.addOption(option);
if (argv[i].equals("-start")) {
myStartDate = parseDate(argv[i + 1]);
}
option = Option.builder().longOpt("help").build();
options.addOption(option);
if (argv[i].equals("-end")) {
myEndDate = parseDate(argv[i + 1]);
}
option = Option.builder().longOpt("start").hasArg().build();
options.addOption(option);
if (argv[i].equals("-lookup")) {
myLookUp = true;
}
option = Option.builder().longOpt("end").hasArg().build();
options.addOption(option);
option = Option.builder().longOpt("lookup").build();
options.addOption(option);
// Parse the command.
DefaultParser cmdParser = new DefaultParser();
CommandLine cmd = cmdParser.parse(options, argv);
// Analyze the command.
if (cmd.hasOption("help")) {
LogAnalyser.usage();
System.exit(0);
}
if (cmd.hasOption("log")) {
myLogDir = cmd.getOptionValue("log");
}
if (cmd.hasOption("file")) {
myFileTemplate = cmd.getOptionValue("file");
}
if (cmd.hasOption("cfg")) {
myConfigFile = cmd.getOptionValue("cfg");
}
if (cmd.hasOption("out")) {
myOutFile = cmd.getOptionValue("out");
}
if (cmd.hasOption("start")) {
myStartDate = parseDate(cmd.getOptionValue("start"));
}
if (cmd.hasOption("end")) {
myEndDate = parseDate(cmd.getOptionValue("end"));
}
myLookUp = cmd.hasOption("lookup");
// now call the method which actually processes the logs
processLogs(context, myLogDir, myFileTemplate, myConfigFile, myOutFile, myStartDate, myEndDate, myLookUp);
processLogs(context, myLogDir, myFileTemplate, myConfigFile, myOutFile,
myStartDate, myEndDate, myLookUp);
}
/**
@@ -406,18 +440,18 @@ public class LogAnalyser {
startTime = new GregorianCalendar();
//instantiate aggregators
actionAggregator = new HashMap<String, Integer>();
searchAggregator = new HashMap<String, Integer>();
userAggregator = new HashMap<String, Integer>();
itemAggregator = new HashMap<String, Integer>();
archiveStats = new HashMap<String, Integer>();
actionAggregator = new HashMap<>();
searchAggregator = new HashMap<>();
userAggregator = new HashMap<>();
itemAggregator = new HashMap<>();
archiveStats = new HashMap<>();
//instantiate lists
generalSummary = new ArrayList<String>();
excludeWords = new ArrayList<String>();
excludeTypes = new ArrayList<String>();
excludeChars = new ArrayList<String>();
itemTypes = new ArrayList<String>();
generalSummary = new ArrayList<>();
excludeWords = new ArrayList<>();
excludeTypes = new ArrayList<>();
excludeChars = new ArrayList<>();
itemTypes = new ArrayList<>();
// set the parameters for this analysis
setParameters(myLogDir, myFileTemplate, myConfigFile, myOutFile, myStartDate, myEndDate, myLookUp);
@@ -529,10 +563,11 @@ public class LogAnalyser {
// for each search word add to the aggregator or
// increment the aggregator's counter
for (int j = 0; j < words.length; j++) {
for (String word : words) {
// FIXME: perhaps aggregators ought to be objects
// themselves
searchAggregator.put(words[j], increment(searchAggregator, words[j]));
searchAggregator.put(word,
increment(searchAggregator, word));
}
}
@@ -591,13 +626,13 @@ public class LogAnalyser {
}
// do the average views analysis
if ((archiveStats.get("All Items")).intValue() != 0) {
if ((archiveStats.get("All Items")) != 0) {
// FIXME: this is dependent on their being a query on the db, which
// there might not always be if it becomes configurable
Double avg = Math.ceil(
double avg = Math.ceil(
(actionAggregator.get("view_item")).doubleValue() /
(archiveStats.get("All Items")).doubleValue());
views = avg.intValue();
views = Math.round(avg);
}
// finally, write the output
@@ -672,55 +707,55 @@ public class LogAnalyser {
Iterator<String> keys = null;
// output the number of lines parsed
summary.append("log_lines=" + Integer.toString(lineCount) + "\n");
summary.append("log_lines=").append(Integer.toString(lineCount)).append("\n");
// output the number of warnings encountered
summary.append("warnings=" + Integer.toString(warnCount) + "\n");
summary.append("exceptions=" + Integer.toString(excCount) + "\n");
summary.append("warnings=").append(Integer.toString(warnCount)).append("\n");
summary.append("exceptions=").append(Integer.toString(excCount)).append("\n");
// set the general summary config up in the aggregator file
for (int i = 0; i < generalSummary.size(); i++) {
summary.append("general_summary=" + generalSummary.get(i) + "\n");
summary.append("general_summary=").append(generalSummary.get(i)).append("\n");
}
// output the host name
summary.append("server_name=" + hostName + "\n");
summary.append("server_name=").append(hostName).append("\n");
// output the service name
summary.append("service_name=" + name + "\n");
summary.append("service_name=").append(name).append("\n");
// output the date information if necessary
SimpleDateFormat sdf = new SimpleDateFormat("dd'/'MM'/'yyyy");
if (startDate != null) {
summary.append("start_date=" + sdf.format(startDate) + "\n");
summary.append("start_date=").append(sdf.format(startDate)).append("\n");
} else if (logStartDate != null) {
summary.append("start_date=" + sdf.format(logStartDate) + "\n");
summary.append("start_date=").append(sdf.format(logStartDate)).append("\n");
}
if (endDate != null) {
summary.append("end_date=" + sdf.format(endDate) + "\n");
summary.append("end_date=").append(sdf.format(endDate)).append("\n");
} else if (logEndDate != null) {
summary.append("end_date=" + sdf.format(logEndDate) + "\n");
summary.append("end_date=").append(sdf.format(logEndDate)).append("\n");
}
// write out the archive stats
keys = archiveStats.keySet().iterator();
while (keys.hasNext()) {
String key = keys.next();
summary.append("archive." + key + "=" + archiveStats.get(key) + "\n");
summary.append("archive.").append(key).append("=").append(archiveStats.get(key)).append("\n");
}
// write out the action aggregation results
keys = actionAggregator.keySet().iterator();
while (keys.hasNext()) {
String key = keys.next();
summary.append("action." + key + "=" + actionAggregator.get(key) + "\n");
summary.append("action.").append(key).append("=").append(actionAggregator.get(key)).append("\n");
}
// depending on the config settings for reporting on emails output the
// login information
summary.append("user_email=" + userEmail + "\n");
summary.append("user_email=").append(userEmail).append("\n");
int address = 1;
keys = userAggregator.keySet().iterator();
@@ -731,9 +766,10 @@ public class LogAnalyser {
String key = keys.next();
summary.append("user.");
if (userEmail.equals("on")) {
summary.append(key + "=" + userAggregator.get(key) + "\n");
summary.append(key).append("=").append(userAggregator.get(key)).append("\n");
} else if (userEmail.equals("alias")) {
summary.append("Address " + Integer.toString(address++) + "=" + userAggregator.get(key) + "\n");
summary.append("Address ").append(Integer.toString(address++))
.append("=").append(userAggregator.get(key)).append("\n");
}
}
@@ -742,12 +778,13 @@ public class LogAnalyser {
// the listing there are
// output the search word information
summary.append("search_floor=" + searchFloor + "\n");
summary.append("search_floor=").append(searchFloor).append("\n");
keys = searchAggregator.keySet().iterator();
while (keys.hasNext()) {
String key = keys.next();
if ((searchAggregator.get(key)).intValue() >= searchFloor) {
summary.append("search." + key + "=" + searchAggregator.get(key) + "\n");
if ((searchAggregator.get(key)) >= searchFloor) {
summary.append("search.").append(key).append("=")
.append(searchAggregator.get(key)).append("\n");
}
}
@@ -759,35 +796,35 @@ public class LogAnalyser {
// be the same thing.
// item viewing information
summary.append("item_floor=" + itemFloor + "\n");
summary.append("host_url=" + url + "\n");
summary.append("item_lookup=" + itemLookup + "\n");
summary.append("item_floor=").append(itemFloor).append("\n");
summary.append("host_url=").append(url).append("\n");
summary.append("item_lookup=").append(itemLookup).append("\n");
// write out the item access information
keys = itemAggregator.keySet().iterator();
while (keys.hasNext()) {
String key = keys.next();
if ((itemAggregator.get(key)).intValue() >= itemFloor) {
summary.append("item." + key + "=" + itemAggregator.get(key) + "\n");
if ((itemAggregator.get(key)) >= itemFloor) {
summary.append("item.").append(key).append("=")
.append(itemAggregator.get(key)).append("\n");
}
}
// output the average views per item
if (views > 0) {
summary.append("avg_item_views=" + views + "\n");
summary.append("avg_item_views=").append(views).append("\n");
}
// insert the analysis processing time information
Calendar endTime = new GregorianCalendar();
long timeInMillis = (endTime.getTimeInMillis() - startTime.getTimeInMillis());
summary.append("analysis_process_time=" + Long.toString(timeInMillis / 1000) + "\n");
summary.append("analysis_process_time=")
.append(Long.toString(timeInMillis / 1000)).append("\n");
// finally write the string into the output file
try {
BufferedWriter out = new BufferedWriter(new FileWriter(outFile));
try (BufferedWriter out = new BufferedWriter(new FileWriter(outFile));) {
out.write(summary.toString());
out.flush();
out.close();
} catch (IOException e) {
System.out.println("Unable to write to output file " + outFile);
System.exit(0);
@@ -891,11 +928,11 @@ public class LogAnalyser {
if (i > 0) {
wordRXString.append("|");
}
wordRXString.append(" " + excludeWords.get(i) + " ");
wordRXString.append(" ").append(excludeWords.get(i)).append(" ");
wordRXString.append("|");
wordRXString.append("^" + excludeWords.get(i) + " ");
wordRXString.append("^").append(excludeWords.get(i)).append(" ");
wordRXString.append("|");
wordRXString.append(" " + excludeWords.get(i) + "$");
wordRXString.append(" ").append(excludeWords.get(i)).append("$");
}
wordRXString.append(")");
wordRX = Pattern.compile(wordRXString.toString());
@@ -956,8 +993,8 @@ public class LogAnalyser {
// read in the config file and set up our instance variables
while ((record = br.readLine()) != null) {
// check to see what kind of line we have
Matcher matchComment = comment.matcher(record);
Matcher matchReal = real.matcher(record);
Matcher matchComment = COMMENT.matcher(record);
Matcher matchReal = REAL.matcher(record);
// if the line is not a comment and is real, read it in
if (!matchComment.matches() && matchReal.matches()) {
@@ -968,7 +1005,7 @@ public class LogAnalyser {
// read the config values into our instance variables (see
// documentation for more info on config params)
if (key.equals("general.summary")) {
actionAggregator.put(value, Integer.valueOf(0));
actionAggregator.put(value, 0);
generalSummary.add(value);
}
@@ -1022,9 +1059,9 @@ public class LogAnalyser {
Integer newValue = null;
if (map.containsKey(key)) {
// FIXME: this seems like a ridiculous way to add Integers
newValue = Integer.valueOf((map.get(key)).intValue() + 1);
newValue = (map.get(key)) + 1;
} else {
newValue = Integer.valueOf(1);
newValue = 1;
}
return newValue;
}

View File

@@ -27,6 +27,10 @@ import java.util.StringTokenizer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.dspace.content.Item;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
@@ -162,7 +166,7 @@ public class ReportGenerator {
/**
* pattern that matches an unqualified aggregator property
*/
private static final Pattern real = Pattern.compile("^(.+)=(.+)");
private static final Pattern REAL = Pattern.compile("^(.+)=(.+)");
//////////////////////////
// Miscellaneous variables
@@ -221,28 +225,46 @@ public class ReportGenerator {
String myOutput = null;
String myMap = null;
// read in our command line options
for (int i = 0; i < argv.length; i++) {
if (argv[i].equals("-format")) {
myFormat = argv[i + 1].toLowerCase();
}
Options options = new Options();
Option option;
if (argv[i].equals("-in")) {
myInput = argv[i + 1];
}
option = Option.builder().longOpt("format").hasArg().build();
options.addOption(option);
if (argv[i].equals("-out")) {
myOutput = argv[i + 1];
}
option = Option.builder().longOpt("in").hasArg().build();
options.addOption(option);
if (argv[i].equals("-map")) {
myMap = argv[i + 1];
}
option = Option.builder().longOpt("out").hasArg().build();
options.addOption(option);
if (argv[i].equals("-help")) {
usage();
System.exit(0);
}
option = Option.builder().longOpt("map").hasArg().build();
options.addOption(option);
option = Option.builder().longOpt("help").build();
options.addOption(option);
DefaultParser parser = new DefaultParser();
CommandLine cmd = parser.parse(options, argv);
if (cmd.hasOption("help")) {
usage();
System.exit(0);
}
if (cmd.hasOption("format")) {
myFormat = cmd.getOptionValue("format");
}
if (cmd.hasOption("in")) {
myInput = cmd.getOptionValue("in");
}
if (cmd.hasOption("out")) {
myOutput = cmd.getOptionValue("out");
}
if (cmd.hasOption("map")) {
myMap = cmd.getOptionValue("map");
}
processReport(context, myFormat, myInput, myOutput, myMap);
@@ -576,7 +598,7 @@ public class ReportGenerator {
// loop through the map file and read in the values
while ((record = br.readLine()) != null) {
Matcher matchReal = real.matcher(record);
Matcher matchReal = REAL.matcher(record);
// if the line is real then read it in
if (matchReal.matches()) {
@@ -650,7 +672,7 @@ public class ReportGenerator {
// loop through the aggregator file and read in the values
while ((record = br.readLine()) != null) {
// match real lines
Matcher matchReal = real.matcher(record);
Matcher matchReal = REAL.matcher(record);
// pre-prepare our input strings
String section = null;

View File

@@ -324,11 +324,7 @@ public class StatisticsLoader {
ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
File reportDir = new File(configurationService.getProperty("log.report.dir"));
if (reportDir != null) {
return reportDir.listFiles(new AnalysisAndReportFilter());
}
return null;
return reportDir.listFiles(new AnalysisAndReportFilter());
}
/**

View File

@@ -1,58 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.util;
import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheManager;
import org.dspace.core.Context;
import org.dspace.servicemanager.DSpaceKernelImpl;
import org.dspace.servicemanager.DSpaceKernelInit;
import org.dspace.services.CachingService;
/**
* List all EhCache CacheManager and Cache instances.
*
* <p>This is a debugging tool, not used in the daily operation of DSpace.
* Just run it from the installed instance using
* {@code bin/dspace dsrun org.dspace.app.util.CacheSnooper}
* to check that the cache configuration is what you expect it to be,
* given your configuration.
*
* <p>This was created to prove a specific cache configuration patch,
* but I leave it here in the hope that it may be useful to others.
*
* @author Mark H. Wood <mwood@iupui.edu>
*/
public class CacheSnooper {
    /** Utility class: not instantiable. */
    private CacheSnooper() { }

    /**
     * Boot the DSpace kernel and service layer, then print every EhCache
     * {@code CacheManager} and each cache it holds, with the cache's
     * configured heap and disk entry limits.
     *
     * @param argv command line arguments (ignored)
     */
    public static void main(String[] argv) {
        // Ensure that the DSpace kernel is started.
        DSpaceKernelImpl kernel = DSpaceKernelInit.getKernel(null);

        // Ensure that the services cache manager is started.
        // NOTE: the returned reference is unused; the lookup is done only for
        // its side effect of initializing the caching service.
        CachingService serviceCaches = kernel.getServiceManager()
                .getServiceByName(null, CachingService.class);

        // Ensure that the database layer is started.
        // NOTE: likewise retrieved only for its initialization side effect.
        Context ctx = new Context();

        // List those caches!
        for (CacheManager manager : CacheManager.ALL_CACHE_MANAGERS) {
            System.out.format("CacheManager:  %s%n", manager);
            for (String cacheName : manager.getCacheNames()) {
                Cache cache = manager.getCache(cacheName);
                System.out.format("  Cache:  '%s'; maxHeap:  %d; maxDisk:  %d%n",
                        cacheName,
                        cache.getCacheConfiguration().getMaxEntriesLocalHeap(),
                        cache.getCacheConfiguration().getMaxEntriesLocalDisk());
            }
        }
    }
}

View File

@@ -37,6 +37,7 @@ public class Configuration {
* <li>{@code --property name} prints the value of the DSpace configuration
* property {@code name} to the standard output.</li>
* <li>{@code --raw} suppresses parameter substitution in the output.</li>
* <li>{@code --first} print only the first of multiple values.</li>
* <li>{@code --help} describes these options.</li>
* </ul>
* If the property does not exist, nothing is written.
@@ -51,6 +52,8 @@ public class Configuration {
"optional name of the module in which 'property' exists");
options.addOption("r", "raw", false,
"do not do property substitution on the value");
options.addOption("f", "first", false,
"display only the first value of an array property");
options.addOption("?", "Get help");
options.addOption("h", "help", false, "Get help");
@@ -90,19 +93,36 @@ public class Configuration {
propNameBuilder.append(cmd.getOptionValue('p'));
String propName = propNameBuilder.toString();
// Print the property's value, if it exists
// Print the property's value(s), if it exists
ConfigurationService cfg = DSpaceServicesFactory.getInstance().getConfigurationService();
if (!cfg.hasProperty(propName)) {
System.out.println();
} else {
String val;
if (cmd.hasOption('r')) {
val = cfg.getPropertyValue(propName).toString();
// Print "raw" values (without property substitutions)
Object rawValue = cfg.getPropertyValue(propName);
if (rawValue.getClass().isArray()) {
for (Object value : (Object[]) rawValue) {
System.out.println(value.toString());
if (cmd.hasOption('f')) {
break; // If --first print only one value
}
}
} else { // Not an array
System.out.println(rawValue.toString());
}
} else {
val = cfg.getProperty(propName);
// Print values with property substitutions
String[] values = cfg.getArrayProperty(propName);
for (String value : values) {
System.out.println(value);
if (cmd.hasOption('f')) {
break; // If --first print only one value
}
}
}
System.out.println(val);
}
System.exit(0);
}
}

View File

@@ -10,6 +10,7 @@ package org.dspace.app.util;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import javax.annotation.Nullable;
@@ -131,10 +132,15 @@ public class DCInput {
private boolean closedVocabulary = false;
/**
* the regex to comply with, null if nothing
* the regex in ECMAScript standard format, usable also by rests.
*/
private String regex = null;
/**
* the computed pattern, null if nothing
*/
private Pattern pattern = null;
/**
* allowed document types
*/
@@ -144,8 +150,8 @@ public class DCInput {
private boolean isMetadataField = false;
private String relationshipType = null;
private String searchConfiguration = null;
private String filter;
private List<String> externalSources;
private final String filter;
private final List<String> externalSources;
/**
* The scope of the input sets, this restricts hidden metadata fields from
@@ -178,7 +184,7 @@ public class DCInput {
//check if the input have a language tag
language = Boolean.valueOf(fieldMap.get("language"));
valueLanguageList = new ArrayList();
valueLanguageList = new ArrayList<>();
if (language) {
String languageNameTmp = fieldMap.get("value-pairs-name");
if (StringUtils.isBlank(languageNameTmp)) {
@@ -191,7 +197,7 @@ public class DCInput {
repeatable = "true".equalsIgnoreCase(repStr)
|| "yes".equalsIgnoreCase(repStr);
String nameVariantsString = fieldMap.get("name-variants");
nameVariants = (StringUtils.isNotBlank(nameVariantsString)) ?
nameVariants = StringUtils.isNotBlank(nameVariantsString) ?
nameVariantsString.equalsIgnoreCase("true") : false;
label = fieldMap.get("label");
inputType = fieldMap.get("input-type");
@@ -203,11 +209,11 @@ public class DCInput {
}
hint = fieldMap.get("hint");
warning = fieldMap.get("required");
required = (warning != null && warning.length() > 0);
required = warning != null && warning.length() > 0;
visibility = fieldMap.get("visibility");
readOnly = fieldMap.get("readonly");
vocabulary = fieldMap.get("vocabulary");
regex = fieldMap.get("regex");
this.initRegex(fieldMap.get("regex"));
String closedVocabularyStr = fieldMap.get("closedVocabulary");
closedVocabulary = "true".equalsIgnoreCase(closedVocabularyStr)
|| "yes".equalsIgnoreCase(closedVocabularyStr);
@@ -238,6 +244,22 @@ public class DCInput {
}
protected void initRegex(String regex) {
this.regex = null;
this.pattern = null;
if (regex != null) {
try {
Optional.ofNullable(RegexPatternUtils.computePattern(regex))
.ifPresent(pattern -> {
this.pattern = pattern;
this.regex = regex;
});
} catch (PatternSyntaxException e) {
log.warn("The regex field of input {} with value {} is invalid!", this.label, regex);
}
}
}
/**
* Is this DCInput for display in the given scope? The scope should be
* either "workflow" or "submit", as per the input forms definition. If the
@@ -248,7 +270,7 @@ public class DCInput {
* @return whether the input should be displayed or not
*/
/**
 * Is this DCInput for display in the given scope? The scope should be
 * either "workflow" or "submit". A null visibility means the input is
 * shown in every scope.
 *
 * @param scope the scope to test against ("workflow" or "submit")
 * @return whether the input should be displayed in that scope
 */
public boolean isVisible(String scope) {
    // Merge residue fix: the stale pre-merge return statement duplicated
    // this line; only the parenthesis-free form is kept.
    return visibility == null || visibility.equals(scope);
}
/**
@@ -381,7 +403,7 @@ public class DCInput {
/**
* Get the style for this form field
*
*
* @return the style
*/
public String getStyle() {
@@ -512,8 +534,12 @@ public class DCInput {
return visibility;
}
/**
 * Get the pre-compiled regular expression pattern for this field, if one
 * was successfully configured.
 *
 * @return the compiled {@link Pattern}, or null when no (valid) regex was set
 */
public Pattern getPattern() {
    return pattern;
}
/**
 * Get the regular expression (as configured) that values of this field
 * must match.
 *
 * @return the regex string, or null when none (or an invalid one) was configured
 */
public String getRegex() {
    // Merge residue fix: both the old "return regex;" and the new
    // "return this.regex;" lines were present; keep a single return.
    return this.regex;
}
public String getFieldName() {
@@ -546,34 +572,45 @@ public class DCInput {
public boolean validate(String value) {
if (StringUtils.isNotBlank(value)) {
try {
if (StringUtils.isNotBlank(regex)) {
Pattern pattern = Pattern.compile(regex);
if (this.pattern != null) {
if (!pattern.matcher(value).matches()) {
return false;
}
}
} catch (PatternSyntaxException ex) {
log.error("Regex validation failed!", ex.getMessage());
log.error("Regex validation failed! {}", ex.getMessage());
}
}
return true;
}
/**
* Verify whether the current field contains an entity relationship
* This also implies a relationship type is defined for this field
* The field can contain both an entity relationship and a metadata field simultaneously
* Get the type bind list for use in determining whether
* to display this field in angular dynamic form building
* @return list of bound types
*/
/**
 * Get the list of item types this field is bound to; used to decide
 * whether the field should be rendered in the dynamic submission form.
 *
 * @return the bound type names
 */
public List<String> getTypeBindList() {
    return this.typeBind;
}
/**
* Verify whether the current field contains an entity relationship.
* This also implies a relationship type is defined for this field.
* The field can contain both an entity relationship and a metadata field
* simultaneously.
* @return true if the field contains a relationship.
*/
/**
 * Report whether this field defines an entity relationship (which implies
 * a relationship type is configured). A field may define both a
 * relationship and a metadata field at the same time.
 *
 * @return true if the field contains a relationship
 */
public boolean isRelationshipField() {
    return this.isRelationshipField;
}
/**
* Verify whether the current field contains a metadata field
* This also implies a field type is defined for this field
* The field can contain both an entity relationship and a metadata field simultaneously
* Verify whether the current field contains a metadata field.
* This also implies a field type is defined for this field.
* The field can contain both an entity relationship and a metadata field
* simultaneously.
* @return true if the field contains a metadata field.
*/
public boolean isMetadataField() {
return isMetadataField;

Some files were not shown because too many files have changed in this diff Show More