Compare commits

1 commit

Author: Tim Donohue
SHA1: 817c9abfb3
Message: [maven-scm] copy for tag dspace-1.7.0
git-svn-id: http://scm.dspace.org/svn/repo/dspace/tags/dspace-1.7.0@5990 9c30dcfa-912a-0410-8fc2-9e0234be79fd
Date: 2010-12-16 18:38:09 +00:00

5773 changed files with 388267 additions and 661658 deletions


@@ -1,28 +0,0 @@
# DSpace configuration for Codecov.io coverage reports
# These override the default YAML settings at
# https://docs.codecov.io/docs/codecov-yaml#section-default-yaml
# Can be validated via instructions at:
# https://docs.codecov.io/docs/codecov-yaml#validate-your-repository-yaml

# Settings related to code coverage analysis
coverage:
  status:
    # Configuration for project-level checks. This checks how the PR changes overall coverage.
    project:
      default:
        # For each PR, auto compare coverage to previous commit.
        # Require that overall (project) coverage does NOT drop more than 0.5%
        target: auto
        threshold: 0.5%
    # Configuration for patch-level checks. This checks the relative coverage of the new PR code ONLY.
    patch:
      default:
        # Enable informational mode, which just provides info to reviewers & always passes
        # https://docs.codecov.io/docs/commit-status#section-informational
        informational: true

# Turn PR comments "off". This feature adds the code coverage summary as a
# comment on each PR. See https://docs.codecov.io/docs/pull-request-comments
# However, this same info is available from the Codecov checks in the PR's
# "Checks" tab in GitHub. So, the comment is unnecessary.
comment: false
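
The comments at the top of this file point to Codecov's own validator; a quick local check (a sketch, assuming curl is available and the file is saved as codecov.yml) is to POST it to the documented validation endpoint:

# A JSON echo of the parsed settings means the YAML is valid
cat codecov.yml | curl --data-binary @- https://codecov.io/validate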


@@ -1,9 +0,0 @@
.git/
.idea/
.settings/
*/target/
dspace/modules/*/target/
Dockerfile.*
dspace/src/main/docker/dspace-postgres-loadsql
dspace/src/main/docker/README.md
dspace/src/main/docker-compose/

.gitattributes

@@ -1,20 +0,0 @@
# Auto detect text files and perform LF normalization
* text=auto

# Ensure Unix files always keep Unix line endings
*.sh text eol=lf

# Ensure Windows files always keep Windows line endings
*.bat text eol=crlf

# Standard to msysgit
*.doc diff=astextplain
*.DOC diff=astextplain
*.docx diff=astextplain
*.DOCX diff=astextplain
*.dot diff=astextplain
*.DOT diff=astextplain
*.pdf diff=astextplain
*.PDF diff=astextplain
*.rtf diff=astextplain
*.RTF diff=astextplain
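
To confirm how these rules apply to a particular path, git can be queried directly (bin/run.sh below is only an illustrative path, not a file in this repository):

# Show the text/eol attributes assigned to a shell script by the rules above
git check-attr text eol -- bin/run.sh
# Expected: bin/run.sh: text: set
#           bin/run.sh: eol: lf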


@@ -1,22 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug, needs triage
assignees: ''
---

## Describe the bug
A clear and concise description of what the bug is. Include the version(s) of DSpace where you've seen this problem. Link to examples if they are public.

## To Reproduce
Steps to reproduce the behavior:
1. Do this
2. Then this...

## Expected behavior
A clear and concise description of what you expected to happen.

## Related work
Link to any related tickets or PRs here.


@@ -1,20 +0,0 @@
---
name: Feature request
about: Suggest a new feature for this project
title: ''
labels: new feature, needs triage
assignees: ''
---

## Is your feature request related to a problem? Please describe.
A clear and concise description of what the problem or use case is. For example, I'm always frustrated when [...]

## Describe the solution you'd like
A clear and concise description of what you want to happen.

## Describe alternatives or workarounds you've considered
A clear and concise description of any alternative solutions or features you've considered.

## Additional information
Add any other information, related tickets or screenshots about the feature request here.

.github/dependabot.yml

@@ -1,511 +0,0 @@
#-------------------
# DSpace's dependabot rules. Enables maven updates for all dependencies on a monthly basis
# for main and any maintenance branches. Security updates only apply to main.
#-------------------
version: 2
updates:
  ###############
  ## Main branch
  ###############
  # NOTE: At this time, "security-updates" rules only apply if "target-branch" is unspecified
  # So, only this first section can include "applies-to: security-updates"
  - package-ecosystem: "maven"
    directory: "/"
    # Monthly dependency updates (NOTE: "schedule" doesn't apply to security updates)
    schedule:
      interval: "monthly"
      time: "02:00"
    # Allow up to 10 open PRs for dependencies
    open-pull-requests-limit: 10
    # Group together some upgrades in a single PR
    groups:
      # Group together all Build Tools in a single PR
      build-tools:
        applies-to: version-updates
        patterns:
          - "org.apache.maven.plugins:*"
          - "*:*-maven-plugin"
          - "*:maven-*-plugin"
          - "com.github.spotbugs:spotbugs"
          - "com.google.code.findbugs:*"
          - "com.google.errorprone:*"
          - "com.puppycrawl.tools:checkstyle"
          - "org.sonatype.*:*"
        exclude-patterns:
          # Exclude anything from Spring, as that is in a separate group
          - "org.springframework.*:*"
        update-types:
          - "minor"
          - "patch"
      test-tools:
        applies-to: version-updates
        patterns:
          - "junit:*"
          - "com.github.stefan-birkner:system-rules"
          - "com.h2database:*"
          - "io.findify:s3mock*"
          - "io.netty:*"
          - "org.apache.httpcomponents.client5:*"
          - "org.hamcrest:*"
          - "org.mock-server:*"
          - "org.mockito:*"
          - "org.xmlunit:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Apache Commons deps in a single PR
      apache-commons:
        applies-to: version-updates
        patterns:
          - "org.apache.commons:*"
          - "commons-*:commons-*"
        update-types:
          - "minor"
          - "patch"
      # Group together all fasterxml deps in a single PR
      fasterxml:
        applies-to: version-updates
        patterns:
          - "com.fasterxml:*"
          - "com.fasterxml.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Hibernate deps in a single PR
      hibernate:
        applies-to: version-updates
        patterns:
          - "org.hibernate.*:*"
        update-types:
          - "patch"
      # Group together all Jakarta deps in a single PR
      jakarta:
        applies-to: version-updates
        patterns:
          - "jakarta.*:*"
          - "org.eclipse.angus:jakarta.mail"
          - "org.glassfish.jaxb:jaxb-runtime"
        update-types:
          - "minor"
          - "patch"
      # Group together all Spring deps in a single PR
      spring:
        applies-to: version-updates
        patterns:
          - "org.springframework:*"
          - "org.springframework.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all WebJARs deps in a single PR
      webjars:
        applies-to: version-updates
        patterns:
          - "org.webjars:*"
          - "org.webjars.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group Tika, bouncycastle, and asm because they are tightly integrated
      # and we theoretically want to keep them in sync.
      tika:
        applies-to: version-updates
        patterns:
          - "org.apache.tika:*:*"
          - "org.bouncycastle:*:*"
          - "org.ow2.asm:*:*"
        update-types:
          - "minor"
          - "patch"
    ignore:
      # Don't try to auto-update any DSpace dependencies
      - dependency-name: "org.dspace:*"
      - dependency-name: "org.dspace.*:*"
      # Ignore major/minor updates for Hibernate. Only patch updates can be automated.
      - dependency-name: "org.hibernate.*:*"
        update-types: ["version-update:semver-major", "version-update:semver-minor"]
      # Ignore all major version updates for all dependencies. We'll only automate minor/patch updates.
      - dependency-name: "*"
        update-types: ["version-update:semver-major"]
  ######################
  ## dspace-9_x branch
  ######################
  - package-ecosystem: "maven"
    directory: "/"
    target-branch: dspace-9_x
    schedule:
      interval: "monthly"
      time: "02:00"
    # Allow up to 10 open PRs for dependencies
    open-pull-requests-limit: 10
    # Group together some upgrades in a single PR
    groups:
      # Group together all Build Tools in a single PR
      build-tools:
        applies-to: version-updates
        patterns:
          - "org.apache.maven.plugins:*"
          - "*:*-maven-plugin"
          - "*:maven-*-plugin"
          - "com.github.spotbugs:spotbugs"
          - "com.google.code.findbugs:*"
          - "com.google.errorprone:*"
          - "com.puppycrawl.tools:checkstyle"
          - "org.sonatype.*:*"
        exclude-patterns:
          # Exclude anything from Spring, as that is in a separate group
          - "org.springframework.*:*"
        update-types:
          - "minor"
          - "patch"
      test-tools:
        applies-to: version-updates
        patterns:
          - "junit:*"
          - "com.github.stefan-birkner:system-rules"
          - "com.h2database:*"
          - "io.findify:s3mock*"
          - "io.netty:*"
          - "org.apache.httpcomponents.client5:*"
          - "org.hamcrest:*"
          - "org.mock-server:*"
          - "org.mockito:*"
          - "org.xmlunit:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Apache Commons deps in a single PR
      apache-commons:
        applies-to: version-updates
        patterns:
          - "org.apache.commons:*"
          - "commons-*:commons-*"
        update-types:
          - "minor"
          - "patch"
      # Group together all fasterxml deps in a single PR
      fasterxml:
        applies-to: version-updates
        patterns:
          - "com.fasterxml:*"
          - "com.fasterxml.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Hibernate deps in a single PR
      hibernate:
        applies-to: version-updates
        patterns:
          - "org.hibernate.*:*"
        update-types:
          - "patch"
      # Group together all Jakarta deps in a single PR
      jakarta:
        applies-to: version-updates
        patterns:
          - "jakarta.*:*"
          - "org.eclipse.angus:jakarta.mail"
          - "org.glassfish.jaxb:jaxb-runtime"
        update-types:
          - "minor"
          - "patch"
      # Group together all Spring deps in a single PR
      spring:
        applies-to: version-updates
        patterns:
          - "org.springframework:*"
          - "org.springframework.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all WebJARs deps in a single PR
      webjars:
        applies-to: version-updates
        patterns:
          - "org.webjars:*"
          - "org.webjars.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group Tika, bouncycastle, and asm because they are tightly integrated
      # and we theoretically want to keep them in sync.
      tika:
        applies-to: version-updates
        patterns:
          - "org.apache.tika:*:*"
          - "org.bouncycastle:*:*"
          - "org.ow2.asm:*:*"
        update-types:
          - "minor"
          - "patch"
    ignore:
      # Don't try to auto-update any DSpace dependencies
      - dependency-name: "org.dspace:*"
      - dependency-name: "org.dspace.*:*"
      # Ignore major/minor updates for Hibernate. Only patch updates can be automated.
      - dependency-name: "org.hibernate.*:*"
        update-types: ["version-update:semver-major", "version-update:semver-minor"]
      # Ignore all major version updates for all dependencies. We'll only automate minor/patch updates.
      - dependency-name: "*"
        update-types: [ "version-update:semver-major" ]
  ######################
  ## dspace-8_x branch
  ######################
  - package-ecosystem: "maven"
    directory: "/"
    target-branch: dspace-8_x
    schedule:
      interval: "monthly"
      time: "02:00"
    # Allow up to 10 open PRs for dependencies
    open-pull-requests-limit: 10
    # Group together some upgrades in a single PR
    groups:
      # Group together all Build Tools in a single PR
      build-tools:
        applies-to: version-updates
        patterns:
          - "org.apache.maven.plugins:*"
          - "*:*-maven-plugin"
          - "*:maven-*-plugin"
          - "com.github.spotbugs:spotbugs"
          - "com.google.code.findbugs:*"
          - "com.google.errorprone:*"
          - "com.puppycrawl.tools:checkstyle"
          - "org.sonatype.*:*"
        exclude-patterns:
          # Exclude anything from Spring, as that is in a separate group
          - "org.springframework.*:*"
        update-types:
          - "minor"
          - "patch"
      test-tools:
        applies-to: version-updates
        patterns:
          - "junit:*"
          - "com.github.stefan-birkner:system-rules"
          - "com.h2database:*"
          - "io.findify:s3mock*"
          - "io.netty:*"
          - "org.apache.httpcomponents.client5:*"
          - "org.hamcrest:*"
          - "org.mock-server:*"
          - "org.mockito:*"
          - "org.xmlunit:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Apache Commons deps in a single PR
      apache-commons:
        applies-to: version-updates
        patterns:
          - "org.apache.commons:*"
          - "commons-*:commons-*"
        update-types:
          - "minor"
          - "patch"
      # Group together all fasterxml deps in a single PR
      fasterxml:
        applies-to: version-updates
        patterns:
          - "com.fasterxml:*"
          - "com.fasterxml.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Hibernate deps in a single PR
      hibernate:
        applies-to: version-updates
        patterns:
          - "org.hibernate.*:*"
        update-types:
          - "patch"
      # Group together all Jakarta deps in a single PR
      jakarta:
        applies-to: version-updates
        patterns:
          - "jakarta.*:*"
          - "org.eclipse.angus:jakarta.mail"
          - "org.glassfish.jaxb:jaxb-runtime"
        update-types:
          - "minor"
          - "patch"
      # Group together all Spring deps in a single PR
      spring:
        applies-to: version-updates
        patterns:
          - "org.springframework:*"
          - "org.springframework.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all WebJARs deps in a single PR
      webjars:
        applies-to: version-updates
        patterns:
          - "org.webjars:*"
          - "org.webjars.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group Tika, bouncycastle, and asm because they are tightly integrated
      # and we theoretically want to keep them in sync.
      tika:
        applies-to: version-updates
        patterns:
          - "org.apache.tika:*:*"
          - "org.bouncycastle:*:*"
          - "org.ow2.asm:*:*"
        update-types:
          - "minor"
          - "patch"
    ignore:
      # Don't try to auto-update any DSpace dependencies
      - dependency-name: "org.dspace:*"
      - dependency-name: "org.dspace.*:*"
      # Ignore major/minor updates for Hibernate. Only patch updates can be automated.
      - dependency-name: "org.hibernate.*:*"
        update-types: ["version-update:semver-major", "version-update:semver-minor"]
      # Ignore all major version updates for all dependencies. We'll only automate minor/patch updates.
      - dependency-name: "*"
        update-types: [ "version-update:semver-major" ]
  ######################
  ## dspace-7_x branch
  ######################
  - package-ecosystem: "maven"
    directory: "/"
    target-branch: dspace-7_x
    schedule:
      interval: "monthly"
      time: "02:00"
    # Allow up to 10 open PRs for dependencies
    open-pull-requests-limit: 10
    # Group together some upgrades in a single PR
    groups:
      # Group together all Build Tools in a single PR
      build-tools:
        applies-to: version-updates
        patterns:
          - "org.apache.maven.plugins:*"
          - "*:*-maven-plugin"
          - "*:maven-*-plugin"
          - "com.github.spotbugs:spotbugs"
          - "com.google.code.findbugs:*"
          - "com.google.errorprone:*"
          - "com.puppycrawl.tools:checkstyle"
          - "org.sonatype.*:*"
        exclude-patterns:
          # Exclude anything from Spring, as that is in a separate group
          - "org.springframework.*:*"
        update-types:
          - "minor"
          - "patch"
      test-tools:
        applies-to: version-updates
        patterns:
          - "junit:*"
          - "com.github.stefan-birkner:system-rules"
          - "com.h2database:*"
          - "io.findify:s3mock*"
          - "io.netty:*"
          - "org.hamcrest:*"
          - "org.mock-server:*"
          - "org.mockito:*"
          - "org.xmlunit:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Apache Commons deps in a single PR
      apache-commons:
        applies-to: version-updates
        patterns:
          - "org.apache.commons:*"
          - "commons-*:commons-*"
        update-types:
          - "minor"
          - "patch"
      # Group together all fasterxml deps in a single PR
      fasterxml:
        applies-to: version-updates
        patterns:
          - "com.fasterxml:*"
          - "com.fasterxml.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Hibernate deps in a single PR
      hibernate:
        applies-to: version-updates
        patterns:
          - "org.hibernate.*:*"
        update-types:
          - "patch"
      # Group together all Javax deps in a single PR
      # NOTE: Javax is only used in 7.x and has been replaced by Jakarta in 8.x and later
      jakarta:
        applies-to: version-updates
        patterns:
          - "javax.*:*"
          - "*:javax.mail"
          - "org.glassfish.jaxb:jaxb-runtime"
        update-types:
          - "minor"
          - "patch"
      # Group together all Google deps in a single PR
      # NOTE: These Google deps are only used in 7.x and have been removed in 8.x and later
      google-apis:
        applies-to: version-updates
        patterns:
          - "com.google.apis:*"
          - "com.google.api-client:*"
          - "com.google.http-client:*"
          - "com.google.oauth-client:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Spring deps in a single PR
      spring:
        applies-to: version-updates
        patterns:
          - "org.springframework:*"
          - "org.springframework.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all WebJARs deps in a single PR
      webjars:
        applies-to: version-updates
        patterns:
          - "org.webjars:*"
          - "org.webjars.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group Tika, bouncycastle, and asm because they are tightly integrated
      # and we theoretically want to keep them in sync.
      tika:
        applies-to: version-updates
        patterns:
          - "org.apache.tika:*:*"
          - "org.bouncycastle:*:*"
          - "org.ow2.asm:*:*"
        update-types:
          - "minor"
          - "patch"
    ignore:
      # Don't try to auto-update any DSpace dependencies
      - dependency-name: "org.dspace:*"
      - dependency-name: "org.dspace.*:*"
      # Last version of errorprone to support JDK 11 is 2.31.0
      - dependency-name: "com.google.errorprone:*"
        versions: [">=2.32.0"]
      # Spring Security 5.8 changes the behavior of CSRF Tokens in a way which is incompatible with DSpace 7
      # See https://github.com/DSpace/DSpace/pull/9888#issuecomment-2408165545
      - dependency-name: "org.springframework.security:*"
        versions: [">=5.8.0"]
      # Ignore major/minor updates for Hibernate. Only patch updates can be automated.
      - dependency-name: "org.hibernate.*:*"
        update-types: ["version-update:semver-major", "version-update:semver-minor"]
      # Ignore all major version updates for all dependencies. We'll only automate minor/patch updates.
      - dependency-name: "*"
        update-types: [ "version-update:semver-major" ]


@@ -1,31 +0,0 @@
## References
_Add references/links to any related issues or PRs. These may include:_
* Fixes #issue-number (if this fixes an issue ticket)
* Related to DSpace/RestContract#pr-number (if a corresponding REST Contract PR exists)

## Description
Short summary of changes (1-2 sentences).

## Instructions for Reviewers
Please add a more detailed description of the changes made by your PR. At a minimum, providing a bulleted list of changes in your PR is helpful to reviewers.

List of changes in this PR:
* First, ...
* Second, ...

**Include guidance for how to test or review your PR.** This may include: steps to reproduce a bug, screenshots or description of a new feature, or reasons behind specific changes.

## Checklist
_This checklist provides a reminder of what we are going to look for when reviewing your PR. You need not complete this checklist prior to creating your PR (draft PRs are always welcome).
However, reviewers may request that you complete any actions in this list if you have not done so. If you are unsure about an item in the checklist, don't hesitate to ask. We're here to help!_

- [ ] My PR is **created against the `main` branch** of code (unless it is a backport or is fixing an issue specific to an older branch).
- [ ] My PR is **small in size** (e.g. less than 1,000 lines of code, not including comments & integration tests). Exceptions may be made if previously agreed upon.
- [ ] My PR **passes Checkstyle** validation based on the [Code Style Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Style+Guide).
- [ ] My PR **includes Javadoc** for _all new (or modified) public methods and classes_. It also includes Javadoc for large or complex private methods.
- [ ] My PR **passes all tests and includes new/updated Unit or Integration Tests** based on the [Code Testing Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Testing+Guide).
- [ ] My PR **includes details on how to test it**. I've provided clear instructions to reviewers on how to successfully test this fix or feature.
- [ ] If my PR includes new libraries/dependencies (in any `pom.xml`), I've made sure their licenses align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation.
- [ ] If my PR modifies REST API endpoints, I've opened a separate [REST Contract](https://github.com/DSpace/RestContract/blob/main/README.md) PR related to this change.
- [ ] If my PR includes new configurations, I've provided basic technical documentation in the PR itself.
- [ ] If my PR fixes an issue ticket, I've [linked them together](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue).


@@ -1,112 +0,0 @@
# DSpace Continuous Integration/Build via GitHub Actions
# Concepts borrowed from
# https://docs.github.com/en/free-pro-team@latest/actions/guides/building-and-testing-java-with-maven
name: Build

# Run this Build for all pushes / PRs to current branch
on: [push, pull_request]

permissions:
  contents: read  # to fetch code (actions/checkout)

jobs:
  tests:
    runs-on: ubuntu-latest
    env:
      # Give Maven 1GB of memory to work with
      MAVEN_OPTS: "-Xmx1024M"
    strategy:
      # Create a matrix of two separate configurations for Unit vs Integration Tests
      # This will ensure those tasks are run in parallel
      # Also specify version of Java to use (this can allow us to optionally run tests on multiple JDKs in future)
      matrix:
        include:
          # NOTE: Unit Tests include a retry for occasionally failing tests
          # - surefire.rerunFailingTestsCount => try again for flakey tests, and keep track of/report on number of retries
          - type: "Unit Tests"
            java: 17
            mvnflags: "-DskipUnitTests=false -Dsurefire.rerunFailingTestsCount=2"
            resultsdir: "**/target/surefire-reports/**"
          # NOTE: ITs skip all code validation checks, as they are already done by Unit Test job.
          # - enforcer.skip => Skip maven-enforcer-plugin rules
          # - checkstyle.skip => Skip all checkstyle checks by maven-checkstyle-plugin
          # - license.skip => Skip all license header checks by license-maven-plugin
          # - xml.skip => Skip all XML/XSLT validation by xml-maven-plugin
          # - failsafe.rerunFailingTestsCount => try again for flakey tests, and keep track of/report on number of retries
          - type: "Integration Tests"
            java: 17
            mvnflags: "-DskipIntegrationTests=false -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true -Dfailsafe.rerunFailingTestsCount=2"
            resultsdir: "**/target/failsafe-reports/**"
      # Do NOT exit immediately if one matrix job fails
      # This ensures ITs continue running even if Unit Tests fail, or vice versa
      fail-fast: false
    name: Run ${{ matrix.type }}
    # These are the actual CI steps to perform per job
    steps:
      # https://github.com/actions/checkout
      - name: Checkout codebase
        uses: actions/checkout@v4
      # https://github.com/actions/setup-java
      - name: Install JDK ${{ matrix.java }}
        uses: actions/setup-java@v4
        with:
          java-version: ${{ matrix.java }}
          distribution: 'temurin'
          cache: 'maven'
      # Run parallel Maven builds based on the above 'strategy.matrix'
      - name: Run Maven ${{ matrix.type }}
        env:
          TEST_FLAGS: ${{ matrix.mvnflags }}
        run: mvn --no-transfer-progress -V install -P-assembly -Pcoverage-report $TEST_FLAGS
      # If previous step failed, save results of tests to downloadable artifact for this job
      # (This artifact is downloadable at the bottom of any job's summary page)
      - name: Upload Results of ${{ matrix.type }} to Artifact
        if: ${{ failure() }}
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.type }} results
          path: ${{ matrix.resultsdir }}
      # Upload code coverage report to artifact, so that it can be shared with the 'codecov' job (see below)
      - name: Upload code coverage report to Artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.type }} coverage report
          path: 'dspace/target/site/jacoco-aggregate/jacoco.xml'
          retention-days: 14

  # Codecov upload is a separate job in order to allow us to restart this separate from the entire build/test
  # job above. This is necessary because Codecov uploads seem to randomly fail at times.
  # See https://community.codecov.com/t/upload-issues-unable-to-locate-build-via-github-actions-api/3954
  codecov:
    # Must run after 'tests' job above
    needs: tests
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      # Download artifacts from previous 'tests' job
      - name: Download coverage artifacts
        uses: actions/download-artifact@v4
      # Now attempt upload to Codecov using its action.
      # NOTE: We use a retry action to retry the Codecov upload if it fails the first time.
      #
      # Retry action: https://github.com/marketplace/actions/retry-action
      # Codecov action: https://github.com/codecov/codecov-action
      - name: Upload coverage to Codecov.io
        uses: Wandalen/wretry.action@v1.3.0
        with:
          action: codecov/codecov-action@v4
          # Ensure codecov-action throws an error when it fails to upload
          with: |
            fail_ci_if_error: true
            token: ${{ secrets.CODECOV_TOKEN }}
          # Try re-running action 5 times max
          attempt_limit: 5
          # Run again in 30 seconds
          attempt_delay: 30000
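
The two matrix entries above are plain Maven invocations, so either CI leg can be reproduced locally (a sketch, assuming Maven and a JDK 17 are on the PATH):

# "Unit Tests" matrix entry
mvn --no-transfer-progress -V install -P-assembly -Pcoverage-report \
    -DskipUnitTests=false -Dsurefire.rerunFailingTestsCount=2
# "Integration Tests" matrix entry (skips validation already covered by the unit-test job)
mvn --no-transfer-progress -V install -P-assembly -Pcoverage-report \
    -DskipIntegrationTests=false -Denforcer.skip=true -Dcheckstyle.skip=true \
    -Dlicense.skip=true -Dxml.skip=true -Dfailsafe.rerunFailingTestsCount=2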


@@ -1,63 +0,0 @@
# DSpace CodeQL code scanning configuration for GitHub
# https://docs.github.com/en/code-security/code-scanning
#
# NOTE: Code scanning must be run separately from our default build.yml
# because CodeQL requires a fresh build with all tests *disabled*.
name: "Code Scanning"

# Run this code scan for all pushes / PRs to main or maintenance branches. Also run once a week.
on:
  push:
    branches:
      - main
      - 'dspace-**'
  pull_request:
    branches:
      - main
      - 'dspace-**'
    # Don't run if PR is only updating static documentation
    paths-ignore:
      - '**/*.md'
      - '**/*.txt'
  schedule:
    - cron: "37 0 * * 1"

jobs:
  analyze:
    name: Analyze Code
    runs-on: ubuntu-latest
    # Limit permissions of this GitHub action. Can only write to security-events
    permissions:
      actions: read
      contents: read
      security-events: write
    steps:
      # https://github.com/actions/checkout
      - name: Checkout repository
        uses: actions/checkout@v4
      # https://github.com/actions/setup-java
      - name: Install JDK
        uses: actions/setup-java@v4
        with:
          java-version: 17
          distribution: 'temurin'
      # Initializes the CodeQL tools for scanning.
      # https://github.com/github/codeql-action
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          # Codescan Javascript as well since a few JS files exist in REST API's interface
          languages: java, javascript
      # Autobuild attempts to build any compiled languages
      # NOTE: Based on testing, this autobuild process works well for DSpace. A custom
      # DSpace build w/caching (like in build.yml) was about the same speed as autobuild.
      - name: Autobuild
        uses: github/codeql-action/autobuild@v3
      # Perform GitHub Code Scanning.
      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3


@@ -1,242 +0,0 @@
# DSpace Docker image build for hub.docker.com
name: Docker images

# Run this Build for all pushes to 'main' or maintenance branches, or tagged releases.
# Also run for PRs to ensure PR doesn't break Docker build process
# NOTE: uses "reusable-docker-build.yml" to actually build each of the Docker images.
on:
  push:
    branches:
      - main
      - 'dspace-**'
    tags:
      - 'dspace-**'
  pull_request:

permissions:
  contents: read  # to fetch code (actions/checkout)
  packages: write # to write images to GitHub Container Registry (GHCR)

jobs:
  ####################################################
  # Build/Push the 'dspace/dspace-dependencies' image.
  # This image is used by all other DSpace build jobs.
  ####################################################
  dspace-dependencies:
    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
    if: github.repository == 'dspace/dspace'
    uses: ./.github/workflows/reusable-docker-build.yml
    with:
      build_id: dspace-dependencies
      image_name: dspace/dspace-dependencies
      dockerfile_path: ./Dockerfile.dependencies
    secrets:
      DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}

  #######################################
  # Build/Push the 'dspace/dspace' image
  #######################################
  dspace:
    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
    if: github.repository == 'dspace/dspace'
    # Must run after 'dspace-dependencies' job above
    needs: dspace-dependencies
    uses: ./.github/workflows/reusable-docker-build.yml
    with:
      build_id: dspace-prod
      image_name: dspace/dspace
      dockerfile_path: ./Dockerfile
    secrets:
      DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
      # Enable redeploy of sandbox & demo if the branch for this image matches the deployment branch of
      # these sites as specified in reusable-docker-build.yml
      REDEPLOY_SANDBOX_URL: ${{ secrets.REDEPLOY_SANDBOX_URL }}
      REDEPLOY_DEMO_URL: ${{ secrets.REDEPLOY_DEMO_URL }}

  #############################################################
  # Build/Push the 'dspace/dspace' image ('-test' tag)
  #############################################################
  dspace-test:
    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
    if: github.repository == 'dspace/dspace'
    # Must run after 'dspace-dependencies' job above
    needs: dspace-dependencies
    uses: ./.github/workflows/reusable-docker-build.yml
    with:
      build_id: dspace-test
      image_name: dspace/dspace
      dockerfile_path: ./Dockerfile.test
      # As this is a test/development image, its tags are all suffixed with "-test". Otherwise, it uses the same
      # tagging logic as the primary 'dspace/dspace' image above.
      tags_flavor: suffix=-test
    secrets:
      DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}

  ###########################################
  # Build/Push the 'dspace/dspace-cli' image
  ###########################################
  dspace-cli:
    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
    if: github.repository == 'dspace/dspace'
    # Must run after 'dspace-dependencies' job above
    needs: dspace-dependencies
    uses: ./.github/workflows/reusable-docker-build.yml
    with:
      build_id: dspace-cli
      image_name: dspace/dspace-cli
      dockerfile_path: ./Dockerfile.cli
    secrets:
      DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}

  ###########################################
  # Build/Push the 'dspace/dspace-solr' image
  ###########################################
  dspace-solr:
    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
    if: github.repository == 'dspace/dspace'
    uses: ./.github/workflows/reusable-docker-build.yml
    with:
      build_id: dspace-solr
      image_name: dspace/dspace-solr
      dockerfile_path: ./dspace/src/main/docker/dspace-solr/Dockerfile
      # Must pass solrconfigs to the Dockerfile so that it can find the required Solr config files
      dockerfile_additional_contexts: 'solrconfigs=./dspace/solr/'
    secrets:
      DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
      # Enable redeploy of sandbox & demo SOLR instance whenever dspace-solr image changes for deployed branch.
      # These URLs MUST use different secrets than 'dspace/dspace' image build above as they are deployed separately.
      REDEPLOY_SANDBOX_URL: ${{ secrets.REDEPLOY_SANDBOX_SOLR_URL }}
      REDEPLOY_DEMO_URL: ${{ secrets.REDEPLOY_DEMO_SOLR_URL }}

  ########################################################
  # Build/Push the 'dspace/dspace-postgres-loadsql' image
  ########################################################
  dspace-postgres-loadsql:
    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
    if: github.repository == 'dspace/dspace'
    uses: ./.github/workflows/reusable-docker-build.yml
    with:
      build_id: dspace-postgres-loadsql
      image_name: dspace/dspace-postgres-loadsql
      # Must build out of subdirectory to have access to install script.
      # NOTE: this context will build the image based on the Dockerfile in the specified directory
      dockerfile_context: ./dspace/src/main/docker/dspace-postgres-loadsql/
    secrets:
      DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}

  #################################################################################
  # Test Deployment via Docker to ensure newly built images are working properly
  #################################################################################
  docker-deploy:
    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
    if: github.repository == 'dspace/dspace'
    runs-on: ubuntu-latest
    # Must run after all major images are built
    needs: [dspace, dspace-test, dspace-cli, dspace-solr]
    env:
      # Override default dspace.server.url because backend starts at http://127.0.0.1:8080
      dspace__P__server__P__url: http://127.0.0.1:8080/server
      # Enable all optional modules / controllers for this test deployment.
      # This helps check for errors in deploying these modules via Spring Boot
      iiif__P__enabled: true
      ldn__P__enabled: true
      oai__P__enabled: true
      rdf__P__enabled: true
      signposting__P__enabled: true
      sword__D__server__P__enabled: true
      swordv2__D__server__P__enabled: true
      # If this is a PR against main (default branch), use "latest".
      # Else if this is a PR against a different branch, use the base branch name.
      # Else if this is a commit on main (default branch), use the "latest" tag.
      # Else, just use the branch name.
      # NOTE: DSPACE_VER is used because our docker compose scripts default to using the "-test" image.
      DSPACE_VER: ${{ (github.event_name == 'pull_request' && github.event.pull_request.base.ref == github.event.repository.default_branch && 'latest') || (github.event_name == 'pull_request' && github.event.pull_request.base.ref) || (github.ref_name == github.event.repository.default_branch && 'latest') || github.ref_name }}
      # Docker Registry to use for Docker compose scripts below.
      # We use GitHub's Container Registry to avoid aggressive rate limits at DockerHub.
      DOCKER_REGISTRY: ghcr.io
    steps:
      # Checkout our codebase (to get access to Docker Compose scripts)
      - name: Checkout codebase
        uses: actions/checkout@v4
      # Download Docker image artifacts (which were just built by reusable-docker-build.yml)
      - name: Download Docker image artifacts
        uses: actions/download-artifact@v4
        with:
          # Download all amd64 Docker images (TAR files) into the /tmp/docker directory
          pattern: docker-image-*-linux-amd64
          path: /tmp/docker
          merge-multiple: true
      # Load each of the images into Docker by calling "docker image load" for each.
      # This ensures we are using the images just built & not any prior versions on DockerHub
      - name: Load all downloaded Docker images
        run: |
          find /tmp/docker -type f -name "*.tar" -exec docker image load --input "{}" \;
          docker image ls -a
      # Start backend using our compose script in the codebase.
      - name: Start backend in Docker
        run: |
          docker compose -f docker-compose.yml up -d
          sleep 10
          docker container ls
      # Create a test admin account. Load test data from a simple set of AIPs as defined in cli.ingest.yml
      - name: Load test data into Backend
        run: |
          docker compose -f docker-compose-cli.yml run --rm dspace-cli create-administrator -e test@test.edu -f admin -l user -p admin -c en
          docker compose -f docker-compose-cli.yml -f dspace/src/main/docker-compose/cli.ingest.yml run --rm dspace-cli
      # Verify backend started successfully.
      # 1. Make sure root endpoint is responding (check for dspace.name defined in docker-compose.yml)
      # 2. Also check /collections endpoint to ensure the test data loaded properly (check for a collection name in AIPs)
      - name: Verify backend is responding properly
        run: |
          result=$(wget -O- -q http://127.0.0.1:8080/server/api)
          echo "$result"
          echo "$result" | grep -oE "\"DSpace Started with Docker Compose\","
          result=$(wget -O- -q http://127.0.0.1:8080/server/api/core/collections)
          echo "$result"
          echo "$result" | grep -oE "\"Dog in Yard\","
      # Verify basic backend logging is working.
      # 1. Access the top communities list. Verify that the "Before request" INFO statement is logged
      # 2. Access an invalid endpoint (and ignore 404 response). Verify that a "status:404" WARN statement is logged
      - name: Verify backend is logging properly
        run: |
          wget -O/dev/null -q http://127.0.0.1:8080/server/api/core/communities/search/top
          logs=$(docker compose -f docker-compose.yml logs -n 5 dspace)
          echo "$logs"
          echo "$logs" | grep -o "Before request \[GET /server/api/core/communities/search/top\]"
          wget -O/dev/null -q http://127.0.0.1:8080/server/api/does/not/exist || true
          logs=$(docker compose -f docker-compose.yml logs -n 5 dspace)
          echo "$logs"
          echo "$logs" | grep -o "status:404 exception: The repository type does.not was not found"
      # Verify Handle Server can be started and is working properly
      # 1. First generate the "[dspace]/handle-server" folder with the sitebndl.zip
      # 2. Start the Handle Server (and wait 20 seconds to let it start up)
      # 3. Verify logs do NOT include "Exception" in the text (as that means an error occurred)
      # 4. Check that Handle Proxy HTML page is responding on default port (8000)
      - name: Verify Handle Server is working properly
        run: |
          docker exec -i dspace /dspace/bin/make-handle-config
          echo "Starting Handle Server..."
          docker exec -i dspace /dspace/bin/start-handle-server
          sleep 20
          echo "Checking for errors in error.log"
          result=$(docker exec -i dspace sh -c "cat /dspace/handle-server/logs/error.log* || echo ''")
          echo "$result"
          echo "$result" | grep -vqz "Exception"
          echo "Checking for errors in handle-server.log..."
          result=$(docker exec -i dspace cat /dspace/log/handle-server.log)
          echo "$result"
          echo "$result" | grep -vqz "Exception"
          echo "Checking to see if Handle Proxy webpage is available..."
          result=$(wget -O- -q http://127.0.0.1:8000/)
          echo "$result"
          echo "$result" | grep -oE "Handle Proxy"
      # Shutdown our containers
      - name: Shutdown Docker containers
        run: |
          docker compose -f docker-compose.yml down
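
Because DSPACE_VER and DOCKER_REGISTRY are ordinary environment variables read by the compose files, the smoke test above can be approximated on a workstation (a sketch, assuming Docker Compose v2 and that the compose files honor these variables as the comments state):

# Use the GHCR 'latest' images instead of the compose files' default '-test' images
export DOCKER_REGISTRY=ghcr.io
export DSPACE_VER=latest
docker compose -f docker-compose.yml up -d
# Same health probe the workflow uses
wget -O- -q http://127.0.0.1:8080/server/api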


@@ -1,26 +0,0 @@
# This workflow runs whenever a new issue is created
name: Issue opened

on:
  issues:
    types: [opened]

permissions: {}

jobs:
  automation:
    runs-on: ubuntu-latest
    steps:
      # Add the new issue to a project board, if it needs triage
      # See https://github.com/actions/add-to-project
      - name: Add issue to triage board
        # Only add to project board if issue is flagged as "needs triage" or has no labels
        # NOTE: By default we flag new issues as "needs triage" in our issue template
        if: (contains(github.event.issue.labels.*.name, 'needs triage') || join(github.event.issue.labels.*.name) == '')
        uses: actions/add-to-project@v1.0.0
        # Note, the authentication token below is an ORG level Secret.
        # It must be created/recreated manually via a personal access token with admin:org, project, public_repo permissions
        # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token#permissions-for-the-github_token
        # This is necessary because the "DSpace Backlog" project is an org level project (i.e. not repo specific)
        with:
          github-token: ${{ secrets.TRIAGE_PROJECT_TOKEN }}
          project-url: https://github.com/orgs/DSpace/projects/24


@@ -1,39 +0,0 @@
# This workflow checks open PRs for merge conflicts and labels them when conflicts are found
name: Check for merge conflicts

# Run this for all pushes (i.e. merges) to 'main' or maintenance branches
on:
  push:
    branches:
      - main
      - 'dspace-**'
  # So that the `conflict_label_name` is removed if conflicts are resolved,
  # we allow this to run for `pull_request_target` so that github secrets are available.
  pull_request_target:
    types: [ synchronize ]

permissions: {}

jobs:
  triage:
    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
    if: github.repository == 'dspace/dspace'
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
    steps:
      # See: https://github.com/prince-chrismc/label-merge-conflicts-action
      - name: Auto-label PRs with merge conflicts
        uses: prince-chrismc/label-merge-conflicts-action@v3
        # Ignore any failures -- may occur (randomly?) for older, outdated PRs.
        continue-on-error: true
        # Add "merge conflict" label if a merge conflict is detected. Remove it when resolved.
        # Note, the authentication token is created automatically
        # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token
        with:
          conflict_label_name: 'merge conflict'
          github_token: ${{ secrets.GITHUB_TOKEN }}
          conflict_comment: |
            Hi @${author},
            Conflicts have been detected against the base branch.
            Please [resolve these conflicts](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/about-merge-conflicts) as soon as you can. Thanks!


@@ -1,46 +0,0 @@
# This workflow will attempt to port a merged pull request to
# the branch specified in a "port to" label (if one exists)
name: Port merged Pull Request

# Only run for merged PRs against the "main" or maintenance branches
# We allow this to run for `pull_request_target` so that github secrets are available
# (This is required when the PR comes from a forked repo)
on:
  pull_request_target:
    types: [ closed ]
    branches:
      - main
      - 'dspace-**'

permissions:
  contents: write  # so action can add comments
  pull-requests: write  # so action can create pull requests

jobs:
  port_pr:
    runs-on: ubuntu-latest
    # Don't run on closed *unmerged* pull requests
    if: github.event.pull_request.merged
    steps:
      # Checkout code
      - uses: actions/checkout@v4
      # Port PR to other branch (ONLY if labeled with "port to")
      # See https://github.com/korthout/backport-action
      - name: Create backport pull requests
        uses: korthout/backport-action@v2
        with:
          # Trigger based on a "port to [branch]" label on PR
          # (This label must specify the branch name to port to)
          label_pattern: '^port to ([^ ]+)$'
          # Title to add to the (newly created) port PR
          pull_title: '[Port ${target_branch}] ${pull_title}'
          # Description to add to the (newly created) port PR
          pull_description: 'Port of #${pull_number} by @${pull_author} to `${target_branch}`.'
          # Copy all labels from original PR to (newly created) port PR
          # NOTE: The labels matching 'label_pattern' are automatically excluded
          copy_labels_pattern: '.*'
          # Skip any merge commits in the ported PR. This means only non-merge commits are cherry-picked to the new PR
          merge_commits: 'skip'
          # Use a personal access token (PAT) to create PR as 'dspace-bot' user.
          # A PAT is required in order for the new PR to trigger its own actions (for CI checks)
          github_token: ${{ secrets.PR_PORT_TOKEN }}
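
In practice, the label_pattern above means a committer adds a label such as "port to dspace-8_x" to a merged PR. With the GitHub CLI that might look like this (the PR number 1234 is hypothetical):

# Label a merged PR so this workflow backports it to the dspace-8_x branch
gh pr edit 1234 --add-label "port to dspace-8_x"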


@@ -1,24 +0,0 @@
# This workflow runs whenever a new pull request is created
name: Pull Request opened

# Only run for newly opened PRs against the "main" or maintenance branches
# We allow this to run for `pull_request_target` so that github secrets are available
# (This is required to assign a PR back to the creator when the PR comes from a forked repo)
on:
  pull_request_target:
    types: [ opened ]
    branches:
      - main
      - 'dspace-**'

permissions:
  pull-requests: write

jobs:
  automation:
    runs-on: ubuntu-latest
    steps:
      # Assign the PR to whoever created it. This is useful for visualizing assignments on project boards
      # See https://github.com/toshimaru/auto-author-assign
      - name: Assign PR to creator
        uses: toshimaru/auto-author-assign@v2.1.0


@@ -1,353 +0,0 @@
#
# DSpace's reusable Docker build/push workflow.
#
# This is used by docker.yml for all Docker image builds
name: Reusable DSpace Docker Build

on:
  workflow_call:
    # Possible Inputs to this reusable job
    inputs:
      # Build name/id for this Docker build. Used for digest storage to avoid digest overlap between builds.
      build_id:
        required: true
        type: string
      # Requires the image name to build (e.g. dspace/dspace-test)
      image_name:
        required: true
        type: string
      # Optionally the path to the Dockerfile to use for the build. (Default is [dockerfile_context]/Dockerfile)
      dockerfile_path:
        required: false
        type: string
      # Optionally the context directory to build the Dockerfile within. Defaults to "." (current directory)
      dockerfile_context:
        required: false
        type: string
        default: '.'
      # Optionally a list of "additional_contexts" to pass to Dockerfile. Defaults to empty
      dockerfile_additional_contexts:
        required: false
        type: string
        default: ''
      # If Docker image should have additional tag flavor details (e.g. a suffix), it may be passed in.
      tags_flavor:
        required: false
        type: string
    secrets:
      # Requires that Docker login info be passed in as secrets.
      DOCKER_USERNAME:
        required: true
      DOCKER_ACCESS_TOKEN:
        required: true
      # These URL secrets are optional. When specified & branch checks match, the redeployment code below will trigger.
      # Therefore builds which need to trigger redeployment MUST specify these URLs. All others should leave them empty.
      REDEPLOY_SANDBOX_URL:
        required: false
      REDEPLOY_DEMO_URL:
        required: false

# Define shared default settings as environment variables
env:
  IMAGE_NAME: ${{ inputs.image_name }}
  # Define tags to use for Docker images based on Git tags/branches (for docker/metadata-action)
  # For a new commit on default branch (main), use the literal tag 'latest' on Docker image.
  # For a new commit on other branches, use the branch name as the tag for Docker image.
  # For a new tag, copy that tag name as the tag for Docker image.
  # For a pull request, use the name of the base branch that the PR was created against or "latest" (for main).
  # e.g. A PR against 'main' will use "latest". A PR against 'dspace-7_x' will use 'dspace-7_x'.
  IMAGE_TAGS: |
    type=raw,value=latest,enable=${{ github.ref_name == github.event.repository.default_branch }}
    type=ref,event=branch,enable=${{ github.ref_name != github.event.repository.default_branch }}
    type=ref,event=tag
    type=raw,value=${{ (github.event.pull_request.base.ref == github.event.repository.default_branch && 'latest') || github.event.pull_request.base.ref }},enable=${{ github.event_name == 'pull_request' }}
  # Define default tag "flavor" for docker/metadata-action per
  # https://github.com/docker/metadata-action#flavor-input
  # We manage the 'latest' tag ourselves to the 'main' branch (see settings above)
  TAGS_FLAVOR: |
    latest=false
    ${{ inputs.tags_flavor }}
  # When these URL variables are specified & required branch matches, then the sandbox or demo site will be redeployed.
  # See "Redeploy" steps below for more details.
  REDEPLOY_SANDBOX_URL: ${{ secrets.REDEPLOY_SANDBOX_URL }}
  REDEPLOY_DEMO_URL: ${{ secrets.REDEPLOY_DEMO_URL }}
  # Current DSpace branches (and architecture) which are deployed to demo.dspace.org & sandbox.dspace.org respectively
  DEPLOY_DEMO_BRANCH: 'dspace-9_x'
  DEPLOY_SANDBOX_BRANCH: 'main'
  DEPLOY_ARCH: 'linux/amd64'
  # Registry used during building of Docker images. (All images are later copied to docker.io registry)
  # We use GitHub's Container Registry to avoid aggressive rate limits at DockerHub.
  DOCKER_BUILD_REGISTRY: ghcr.io

jobs:
  docker-build:
    strategy:
      matrix:
        # Architectures / Platforms for which we will build Docker images
        arch: [ 'linux/amd64', 'linux/arm64' ]
        isPr:
          - ${{ github.event_name == 'pull_request' }}
        # If this is a PR, we ONLY build for AMD64. For PRs we only do a sanity check test to ensure Docker builds work.
        # The below exclude therefore ensures we do NOT build ARM64 for PRs.
        exclude:
          - isPr: true
            arch: linux/arm64
    # If ARM64, then use the Ubuntu ARM64 runner. Otherwise, use the Ubuntu AMD64 runner
    runs-on: ${{ matrix.arch == 'linux/arm64' && 'ubuntu-24.04-arm' || 'ubuntu-latest' }}
    steps:
      # This step converts the slashes in the "arch" matrix values above into dashes & saves to env.ARCH_NAME
      # E.g. "linux/amd64" becomes "linux-amd64"
      # This is necessary because all upload artifacts CANNOT have special chars (like slashes)
      # NOTE: The regex-like syntax below is Bash Parameter Substitution
      - name: Prepare
        run: |
          platform=${{ matrix.arch }}
          echo "ARCH_NAME=${platform//\//-}" >> $GITHUB_ENV
      # https://github.com/actions/checkout
      - name: Checkout codebase
        uses: actions/checkout@v4
      # https://github.com/docker/login-action
      # NOTE: This login occurs for BOTH non-PRs or PRs. PRs *must* also login to access private images from GHCR
      # during the build process
      - name: Login to ${{ env.DOCKER_BUILD_REGISTRY }}
        uses: docker/login-action@v3
        with:
          registry: ${{ env.DOCKER_BUILD_REGISTRY }}
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      # https://github.com/docker/setup-buildx-action
      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@v3
      # https://github.com/docker/metadata-action
      # Extract metadata used for Docker images in all build steps below
      - name: Extract metadata (tags, labels) from GitHub for Docker image
        id: meta_build
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.DOCKER_BUILD_REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: ${{ env.IMAGE_TAGS }}
          flavor: ${{ env.TAGS_FLAVOR }}
      #--------------------------------------------------------------------
      # First, for all branch commits (non-PRs) we build the image & upload
      # to GitHub Container Registry (GHCR). After uploading the image
      # to GHCR, we store the image digest in an artifact, so we can
      # create a merged manifest later (see 'docker-build_manifest' job).
      #
      # NOTE: We use GHCR in order to avoid aggressive rate limits at DockerHub.
      #--------------------------------------------------------------------
      # https://github.com/docker/build-push-action
      - name: Build and push image to ${{ env.DOCKER_BUILD_REGISTRY }}
        if: ${{ ! matrix.isPr }}
        id: docker_build
        uses: docker/build-push-action@v5
        with:
          build-contexts: |
            ${{ inputs.dockerfile_additional_contexts }}
          context: ${{ inputs.dockerfile_context }}
          file: ${{ inputs.dockerfile_path }}
          # Tell DSpace's Docker files to use the build registry instead of DockerHub
          build-args:
            DOCKER_REGISTRY=${{ env.DOCKER_BUILD_REGISTRY }}
          platforms: ${{ matrix.arch }}
          push: true
          # Use tags / labels provided by 'docker/metadata-action' above
          tags: ${{ steps.meta_build.outputs.tags }}
          labels: ${{ steps.meta_build.outputs.labels }}
          # Use GitHub cache to load cached Docker images and cache the results of this build
          # This decreases the number of images we need to fetch from DockerHub
          cache-from: type=gha,scope=${{ inputs.build_id }}
          cache-to: type=gha,scope=${{ inputs.build_id }},mode=min
      # Export the digest of Docker build locally
      - name: Export Docker build digest
        if: ${{ ! matrix.isPr }}
        run: |
          mkdir -p /tmp/digests
          digest="${{ steps.docker_build.outputs.digest }}"
          touch "/tmp/digests/${digest#sha256:}"
      # Upload digest to an artifact, so that it can be used in combined manifest below
      # (The purpose of the combined manifest is to list both amd64 and arm64 builds under same tag)
      - name: Upload Docker build digest to artifact
        if: ${{ ! matrix.isPr }}
        uses: actions/upload-artifact@v4
        with:
          name: digests-${{ inputs.build_id }}-${{ env.ARCH_NAME }}
          path: /tmp/digests/*
          if-no-files-found: error
          retention-days: 1
      #------------------------------------------------------------------------------
      # Second, we build the image again in order to store it in a local TAR file.
      # This TAR of the image is cached/saved as an artifact, so that it can be used
      # by later jobs to install the brand-new images for automated testing.
      # This TAR build is performed BOTH for PRs and for branch commits (non-PRs).
      #
      # (This approach has the advantage of avoiding having to download the newly built
      # image from DockerHub or GHCR during automated testing.)
      #
      # See the 'docker-deploy' job in docker.yml as an example of where this TAR is used.
      #-------------------------------------------------------------------------------
      # Build local image (again) and store in a TAR file in /tmp directory
      # This step is only done for AMD64, as that's the only image we use in our automated testing (at this time).
      # NOTE: This step cannot be combined with the build above as it's a different type of output.
      - name: Build and push image to local TAR file
        if: ${{ matrix.arch == 'linux/amd64' }}
        uses: docker/build-push-action@v5
        with:
          build-contexts: |
            ${{ inputs.dockerfile_additional_contexts }}
          context: ${{ inputs.dockerfile_context }}
          file: ${{ inputs.dockerfile_path }}
          # Tell DSpace's Docker files to use the build registry instead of DockerHub
          build-args:
            DOCKER_REGISTRY=${{ env.DOCKER_BUILD_REGISTRY }}
          platforms: ${{ matrix.arch }}
          tags: ${{ steps.meta_build.outputs.tags }}
          labels: ${{ steps.meta_build.outputs.labels }}
          # Use GitHub cache to load cached Docker images and cache the results of this build
          # This decreases the number of images we need to fetch from DockerHub
          cache-from: type=gha,scope=${{ inputs.build_id }}
          cache-to: type=gha,scope=${{ inputs.build_id }},mode=min
          # Export image to a local TAR file
          outputs: type=docker,dest=/tmp/${{ inputs.build_id }}.tar
      # Upload the local docker image (in TAR file) to a build Artifact
      # This step is only done for AMD64, as that's the only image we use in our automated testing (at this time).
      - name: Upload local image TAR to artifact
        if: ${{ matrix.arch == 'linux/amd64' }}
        uses: actions/upload-artifact@v4
        with:
          name: docker-image-${{ inputs.build_id }}-${{ env.ARCH_NAME }}
          path: /tmp/${{ inputs.build_id }}.tar
          if-no-files-found: error
          retention-days: 1

  ##########################################################################################
  # Merge Docker digests (from various architectures) into a single manifest.
  # This runs after all Docker builds complete above. The purpose is to include all builds
  # under a single manifest for this tag.
  # (e.g. both linux/amd64 and linux/arm64 should be listed under the same tagged Docker image)
  ##########################################################################################
  docker-build_manifest:
    # Only run if this is NOT a PR
    if: ${{ github.event_name != 'pull_request' }}
    runs-on: ubuntu-latest
    needs:
      - docker-build
    steps:
      - name: Download Docker build digests
        uses: actions/download-artifact@v4
        with:
          path: /tmp/digests
          # Download digests for both AMD64 and ARM64 into same directory
          pattern: digests-${{ inputs.build_id }}-*
          merge-multiple: true
      - name: Login to ${{ env.DOCKER_BUILD_REGISTRY }}
        uses: docker/login-action@v3
        with:
          registry: ${{ env.DOCKER_BUILD_REGISTRY }}
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Add Docker metadata for image
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.DOCKER_BUILD_REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: ${{ env.IMAGE_TAGS }}
          flavor: ${{ env.TAGS_FLAVOR }}
      - name: Create manifest list from digests and push to ${{ env.DOCKER_BUILD_REGISTRY }}
        working-directory: /tmp/digests
        run: |
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf '${{ env.DOCKER_BUILD_REGISTRY }}/${{ env.IMAGE_NAME }}@sha256:%s ' *)
      - name: Inspect manifest in ${{ env.DOCKER_BUILD_REGISTRY }}
        run: |
          docker buildx imagetools inspect ${{ env.DOCKER_BUILD_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.meta.outputs.version }}

  ##########################################################################################
  # Copy images / manifest to DockerHub.
  # This MUST run after *both* images (AMD64 and ARM64) are built and uploaded to GitHub
  # Container Registry (GHCR). Attempting to run this in parallel to GHCR builds can result
  # in a race condition...i.e. the copy to DockerHub may fail if GHCR image has been updated
  # at the moment when the copy occurs.
  ##########################################################################################
  docker-copy_to_dockerhub:
    # Only run if this is NOT a PR
    if: ${{ github.event_name != 'pull_request' }}
    runs-on: ubuntu-latest
    needs:
      - docker-build_manifest
    steps:
      # 'regctl' is used to more easily copy the image to DockerHub and obtain the digest from DockerHub
      # See https://github.com/regclient/regclient/blob/main/docs/regctl.md
      - name: Install regctl for Docker registry tools
        uses: regclient/actions/regctl-installer@main
        with:
          release: 'v0.8.0'
      # This recreates Docker tags for DockerHub
      - name: Add Docker metadata for image
        id: meta_dockerhub
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.IMAGE_NAME }}
          tags: ${{ env.IMAGE_TAGS }}
          flavor: ${{ env.TAGS_FLAVOR }}
      # Login to source registry first, as this is where we are copying *from*
      - name: Login to ${{ env.DOCKER_BUILD_REGISTRY }}
        uses: docker/login-action@v3
        with:
          registry: ${{ env.DOCKER_BUILD_REGISTRY }}
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      # Login to DockerHub, since this is where we are copying *to*
      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
      # Copy the image from source to DockerHub
      - name: Copy image from ${{ env.DOCKER_BUILD_REGISTRY }} to docker.io
        run: |
          regctl image copy ${{ env.DOCKER_BUILD_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.meta_dockerhub.outputs.version }} docker.io/${{ env.IMAGE_NAME }}:${{ steps.meta_dockerhub.outputs.version }}
      #--------------------------------------------------------------------
      # Finally, check whether demo.dspace.org or sandbox.dspace.org need
      # to be redeployed based on these new DockerHub images.
      #--------------------------------------------------------------------
      # If this build is for the branch that Sandbox uses and passed in a REDEPLOY_SANDBOX_URL secret,
      # then redeploy https://sandbox.dspace.org
      - name: Redeploy sandbox.dspace.org (based on main branch)
        if: |
          env.REDEPLOY_SANDBOX_URL != '' &&
          github.ref_name == env.DEPLOY_SANDBOX_BRANCH
        run: |
          curl -X POST $REDEPLOY_SANDBOX_URL
      # If this build is for the branch that Demo uses and passed in a REDEPLOY_DEMO_URL secret,
      # then redeploy https://demo.dspace.org
      - name: Redeploy demo.dspace.org (based on maintenance branch)
        if: |
          env.REDEPLOY_DEMO_URL != '' &&
          github.ref_name == env.DEPLOY_DEMO_BRANCH
        run: |
          curl -X POST $REDEPLOY_DEMO_URL
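
The jq filter in the "Create manifest list" step turns the metadata-action JSON into a series of -t tag flags for 'docker buildx imagetools create'. A standalone illustration (the sample tags are made up):

echo '{"tags":["ghcr.io/dspace/dspace:latest","ghcr.io/dspace/dspace:main"]}' \
  | jq -cr '.tags | map("-t " + .) | join(" ")'
# Output: -t ghcr.io/dspace/dspace:latest -t ghcr.io/dspace/dspace:main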

.gitignore

@@ -1,52 +0,0 @@
## Ignore the MVN compiled output directories from version tracking
target/

## Ignore tags index files created by Exuberant Ctags
tags

## Ignore project files created by Eclipse
.settings/
/bin/
.project
.classpath
.checkstyle
.factorypath

## Ignore project files created by IntelliJ IDEA
*.iml
*.ipr
*.iws
.idea/
overlays/

## Ignore project files created by NetBeans
nbproject/
build/
nbbuild/
dist/
nbdist/
nbactions.xml
nb-configuration.xml

## Ignore project files created by Visual Studio Code
.vscode/

## Ignore all *.properties files in root folder, EXCEPT build.properties (the default)
## KEPT FOR BACKWARDS COMPATIBILITY WITH 5.x (build.properties is now replaced with local.cfg)
/*.properties
!/build.properties

# Ignore a local.cfg file in root folder, if it exists
/local.cfg

# Also ignore it under dspace/config
/dspace/config/local.cfg

## Mac noise
.DS_Store

## Ignore JRebel project configuration
rebel.xml

## Ignore jenv configuration
.java-version

View File

@@ -1,46 +0,0 @@
# How to Contribute
DSpace is a community-built and community-supported project. We do not have a centralized development or support team, but we have a dedicated group of volunteers who help us improve the software, documentation, resources, etc.
* [Contribute new code via a Pull Request](#contribute-new-code-via-a-pull-request)
* [Contribute documentation](#contribute-documentation)
* [Help others on mailing lists or Slack](#help-others-on-mailing-lists-or-slack)
* [Join a working or interest group](#join-a-working-or-interest-group)
## Contribute new code via a Pull Request
We accept [GitHub Pull Requests (PRs)](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork) at any time from anyone.
Contributors to each release are recognized in our [Release Notes](https://wiki.lyrasis.org/display/DSDOC9x/Release+Notes).
Code Contribution Checklist
- [ ] PRs _should_ be kept small (ideally fewer than 1,000 lines of code, not including comments & tests)
- [ ] PRs **must** pass Checkstyle validation based on our [Code Style Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Style+Guide) (see the local commands sketched at the end of this section).
- [ ] PRs **must** include Javadoc for _all new/modified public methods and classes_. Larger private methods should also have Javadoc.
- [ ] PRs **must** pass all automated tests and include new/updated Unit or Integration tests based on our [Code Testing Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Testing+Guide).
- [ ] Details on how to test the PR **must** be provided. Reviewers must be aware of any steps they need to take to successfully test your fix or feature.
- [ ] If a PR includes new libraries/dependencies (in any `pom.xml`), then their software licenses **must** align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation.
- [ ] Basic technical documentation _should_ be provided for any new features or changes to the REST API. REST API changes should be documented in our [Rest Contract](https://github.com/DSpace/RestContract).
- [ ] If a PR fixes an issue ticket, please [link them together](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue).
Additional details on the code contribution process can be found in our [Code Contribution Guidelines](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines).
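Before opening a PR, the automated checks above can be run locally. A minimal sketch using standard Maven goals (the exact flags/profiles DSpace binds may differ; see the Code Testing Guide):

    mvn checkstyle:check    # Checkstyle validation only
    mvn clean verify        # full build, including code verification checks and tests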
## Contribute documentation
DSpace Documentation is a collaborative effort in a shared Wiki. The latest documentation is at https://wiki.lyrasis.org/display/DSDOC
If you find areas of the DSpace Documentation which you wish to improve, please request a Wiki account by emailing wikihelp@lyrasis.org.
Once you have an account set up, contact @tdonohue (via [Slack](https://wiki.lyrasis.org/display/DSPACE/Slack) or email) for access to edit our Documentation.
## Help others on mailing lists or Slack
DSpace has our own [Slack](https://wiki.lyrasis.org/display/DSPACE/Slack) community and [Mailing Lists](https://wiki.lyrasis.org/display/DSPACE/Mailing+Lists) where discussions take place and questions are answered.
Anyone is welcome to join and help others. We just ask you to follow our [Code of Conduct](https://www.lyrasis.org/about/Pages/Code-of-Conduct.aspx) (adopted via Lyrasis).
## Join a working or interest group
Most of the work in building/improving DSpace comes via [Working Groups](https://wiki.lyrasis.org/display/DSPACE/DSpace+Working+Groups) or [Interest Groups](https://wiki.lyrasis.org/display/DSPACE/DSpace+Interest+Groups).
All working/interest groups are open to anyone to join and participate. A few key groups to be aware of include:
* [DSpace Developer Team](https://wiki.lyrasis.org/display/DSPACE/Developer+Meetings) - This is the primary, volunteer development team. We meet weekly to review our current development [project board](https://github.com/orgs/DSpace/projects), assigning tickets and/or PRs. This is also where discussions of the next release or major issues occur. Anyone is welcome to attend.
* [DSpace Community Advisory Team (DCAT)](https://wiki.lyrasis.org/display/cmtygp/DSpace+Community+Advisory+Team) - This is an interest group for repository managers/administrators. We meet monthly to discuss DSpace, share tips & provide feedback to developers. Anyone is welcome to attend.

View File

@@ -1,73 +0,0 @@
# This image will be published as dspace/dspace
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
#
# - note: default tag for the main branch: dspace/dspace:latest
# This Dockerfile uses JDK17 by default.
# To build with other versions, use "--build-arg JDK_VERSION=[value]"
ARG JDK_VERSION=17
# The Docker version tag to build from
ARG DSPACE_VERSION=latest
# The Docker registry to use for DSpace images. Defaults to "docker.io"
# NOTE: non-DSpace images are hardcoded to use "docker.io" and are not impacted by this build argument
ARG DOCKER_REGISTRY=docker.io
# Step 1 - Run Maven Build
FROM ${DOCKER_REGISTRY}/dspace/dspace-dependencies:${DSPACE_VERSION} AS build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-installer directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install \
&& chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
# Build DSpace
# Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small
# Maven flags here ensure that we skip building the test environment and skip all code verification checks.
# These flags speed up this compilation as much as reasonably possible.
ENV MAVEN_FLAGS="-P-test-environment -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true"
RUN mvn --no-transfer-progress package ${MAVEN_FLAGS} && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Remove the server webapp to keep image small.
RUN rm -rf /install/webapps/server/
# Step 2 - Run Ant Deploy
FROM docker.io/eclipse-temurin:${JDK_VERSION} AS ant_build
ARG TARGET_DIR=dspace-installer
# COPY the /install directory from 'build' container to /dspace-src in this container
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION=1.10.13
ENV ANT_HOME=/tmp/ant-$ANT_VERSION
ENV PATH=$ANT_HOME/bin:$PATH
# Download and install 'ant'
RUN mkdir $ANT_HOME && \
curl --silent --show-error --location --fail --retry 5 --output /tmp/apache-ant.tar.gz \
https://archive.apache.org/dist/ant/binaries/apache-ant-${ANT_VERSION}-bin.tar.gz && \
tar -zx --strip-components=1 -f /tmp/apache-ant.tar.gz -C $ANT_HOME && \
rm /tmp/apache-ant.tar.gz
# Run necessary 'ant' deploy scripts
RUN ant init_installation update_configs update_code update_webapps
# Step 3 - Start up DSpace via Runnable JAR
FROM docker.io/eclipse-temurin:${JDK_VERSION}
# NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration.
ENV DSPACE_INSTALL=/dspace
# Copy the /dspace directory from 'ant_build' container to /dspace in this container
COPY --from=ant_build /dspace $DSPACE_INSTALL
WORKDIR $DSPACE_INSTALL
# Need host command for "[dspace]/bin/make-handle-config"
RUN apt-get update \
&& apt-get install -y --no-install-recommends host \
&& apt-get purge -y --auto-remove \
&& rm -rf /var/lib/apt/lists/*
# Expose Tomcat port (8080) & Handle Server HTTP port (8000)
EXPOSE 8080 8000
# Give java extra memory (2GB)
ENV JAVA_OPTS=-Xmx2000m
# On startup, run DSpace Runnable JAR.
# (Shell form is used so that $DSPACE_INSTALL is expanded at container startup;
# exec form would pass the variable name through literally.)
ENTRYPOINT java -jar webapps/server-boot.jar --dspace.dir=$DSPACE_INSTALL

View File

@@ -1,62 +0,0 @@
# This image will be published as dspace/dspace-cli
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
#
# - note: default tag for the main branch: dspace/dspace-cli:latest
# This Dockerfile uses JDK17 by default.
# To build with other versions, use "--build-arg JDK_VERSION=[value]"
ARG JDK_VERSION=17
# The Docker version tag to build from
ARG DSPACE_VERSION=latest
# The Docker registry to use for DSpace images. Defaults to "docker.io"
# NOTE: non-DSpace images are hardcoded to use "docker.io" and are not impacted by this build argument
ARG DOCKER_REGISTRY=docker.io
# Step 1 - Run Maven Build
FROM ${DOCKER_REGISTRY}/dspace/dspace-dependencies:${DSPACE_VERSION} AS build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-installer directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install \
&& chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
# Build DSpace. Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small
RUN mvn --no-transfer-progress package && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Step 2 - Run Ant Deploy
FROM docker.io/eclipse-temurin:${JDK_VERSION} AS ant_build
ARG TARGET_DIR=dspace-installer
# COPY the /install directory from 'build' container to /dspace-src in this container
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION=1.10.13
ENV ANT_HOME=/tmp/ant-$ANT_VERSION
ENV PATH=$ANT_HOME/bin:$PATH
# Download and install 'ant'
RUN mkdir $ANT_HOME && \
curl --silent --show-error --location --fail --retry 5 --output /tmp/apache-ant.tar.gz \
https://archive.apache.org/dist/ant/binaries/apache-ant-${ANT_VERSION}-bin.tar.gz && \
tar -zx --strip-components=1 -f /tmp/apache-ant.tar.gz -C $ANT_HOME && \
rm /tmp/apache-ant.tar.gz
# Run necessary 'ant' deploy scripts
RUN ant init_installation update_configs update_code
# Step 3 - Run jdk
FROM docker.io/eclipse-temurin:${JDK_VERSION}
# NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration.
ENV DSPACE_INSTALL=/dspace
# Copy the /dspace directory from 'ant_build' container to /dspace in this container
COPY --from=ant_build /dspace $DSPACE_INSTALL
# Give java extra memory (1GB)
ENV JAVA_OPTS=-Xmx1000m
# Install unzip for AIPs
RUN apt-get update \
&& apt-get install -y --no-install-recommends unzip \
&& apt-get purge -y --auto-remove \
&& rm -rf /var/lib/apt/lists/*

View File

@@ -1,82 +0,0 @@
# This image will be published as dspace/dspace-dependencies
# The purpose of this image is to make the build for dspace/dspace run faster
#
# This Dockerfile uses JDK17 by default.
# To build with other versions, use "--build-arg JDK_VERSION=[value]"
ARG JDK_VERSION=17
# Step 1 - Download all Dependencies
FROM docker.io/maven:3-eclipse-temurin-${JDK_VERSION} AS build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# Create the 'dspace' user account & home directory
RUN useradd dspace \
&& mkdir -p /home/dspace \
&& chown -Rv dspace: /home/dspace
RUN chown -Rv dspace: /app
# Switch to dspace user & run below commands as that user
USER dspace
# This next part may look odd, but it speeds up the build of this image *significantly*.
# Copy ONLY the POMs to this image (from local machine). This will allow us to download all dependencies *without*
# performing any code compilation steps.
# Parent POM
ADD --chown=dspace pom.xml /app/
RUN mkdir -p /app/dspace
# 'dspace' module POM. Includes 'additions' ONLY, as it's the only submodule that is required to exist.
ADD --chown=dspace dspace/pom.xml /app/dspace/
RUN mkdir -p /app/dspace/modules/
ADD --chown=dspace dspace/modules/pom.xml /app/dspace/modules/
RUN mkdir -p /app/dspace/modules/additions
ADD --chown=dspace dspace/modules/additions/pom.xml /app/dspace/modules/additions/
# 'dspace-api' module POM
RUN mkdir -p /app/dspace-api
ADD --chown=dspace dspace-api/pom.xml /app/dspace-api/
# 'dspace-iiif' module POM
RUN mkdir -p /app/dspace-iiif
ADD --chown=dspace dspace-iiif/pom.xml /app/dspace-iiif/
# 'dspace-oai' module POM
RUN mkdir -p /app/dspace-oai
ADD --chown=dspace dspace-oai/pom.xml /app/dspace-oai/
# 'dspace-rdf' module POM
RUN mkdir -p /app/dspace-rdf
ADD --chown=dspace dspace-rdf/pom.xml /app/dspace-rdf/
# 'dspace-saml2' module POM
RUN mkdir -p /app/dspace-saml2
ADD --chown=dspace dspace-saml2/pom.xml /app/dspace-saml2/
# 'dspace-server-webapp' module POM
RUN mkdir -p /app/dspace-server-webapp
ADD --chown=dspace dspace-server-webapp/pom.xml /app/dspace-server-webapp/
# 'dspace-services' module POM
RUN mkdir -p /app/dspace-services
ADD --chown=dspace dspace-services/pom.xml /app/dspace-services/
# 'dspace-sword' module POM
RUN mkdir -p /app/dspace-sword
ADD --chown=dspace dspace-sword/pom.xml /app/dspace-sword/
# 'dspace-swordv2' module POM
RUN mkdir -p /app/dspace-swordv2
ADD --chown=dspace dspace-swordv2/pom.xml /app/dspace-swordv2/
# Trigger the installation of all maven dependencies (hide download progress messages)
# Maven flags here ensure that we skip final assembly, skip building test environment and skip all code verification checks.
# These flags speed up this installation and skip tasks we cannot perform as we don't have the full source code.
ENV MAVEN_FLAGS="-P-assembly -P-test-environment -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxjc.skip=true -Dxml.skip=true"
RUN mvn --no-transfer-progress verify ${MAVEN_FLAGS}
# Clear the contents of the /app directory (including all maven target folders), so no artifacts remain.
# This ensures that when the dspace/dspace image is built, it will use the Maven local cache (~/.m2) for dependencies
USER root
RUN rm -rf /app/*
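The POM-first trick explained in the comments above generalizes to any Maven project. A minimal, non-DSpace sketch of the same layer-caching pattern, using dependency:go-offline in place of the verify-with-skips approach used here:

    FROM docker.io/maven:3-eclipse-temurin-17 AS deps
    WORKDIR /app
    # POMs change rarely, so this layer and the dependency download below stay cached
    COPY pom.xml .
    RUN mvn --no-transfer-progress dependency:go-offline
    # Source changes invalidate only the layers from here onward
    COPY src ./src
    RUN mvn --no-transfer-progress package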

View File

@@ -1,74 +0,0 @@
# This image will be published as dspace/dspace
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
#
# - note: default tag for the main branch: dspace/dspace:latest-test
#
# This image is meant for TESTING/DEVELOPMENT ONLY as it deploys the old v6 REST API under HTTP (not HTTPS)
# This Dockerfile uses JDK17 by default.
# To build with other versions, use "--build-arg JDK_VERSION=[value]"
ARG JDK_VERSION=17
# The Docker version tag to build from
ARG DSPACE_VERSION=latest
# The Docker registry to use for DSpace images. Defaults to "docker.io"
# NOTE: non-DSpace images are hardcoded to use "docker.io" and are not impacted by this build argument
ARG DOCKER_REGISTRY=docker.io
# Step 1 - Run Maven Build
FROM ${DOCKER_REGISTRY}/dspace/dspace-dependencies:${DSPACE_VERSION} AS build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-installer directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install \
&& chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
# Build DSpace
# Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small
RUN mvn --no-transfer-progress package && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Remove the server webapp to keep image small.
RUN rm -rf /install/webapps/server/
# Step 2 - Run Ant Deploy
FROM docker.io/eclipse-temurin:${JDK_VERSION} AS ant_build
ARG TARGET_DIR=dspace-installer
# COPY the /install directory from 'build' container to /dspace-src in this container
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION=1.10.12
ENV ANT_HOME=/tmp/ant-$ANT_VERSION
ENV PATH=$ANT_HOME/bin:$PATH
# Download and install 'ant'
RUN mkdir $ANT_HOME && \
curl --silent --show-error --location --fail --retry 5 --output /tmp/apache-ant.tar.gz \
https://archive.apache.org/dist/ant/binaries/apache-ant-${ANT_VERSION}-bin.tar.gz && \
tar -zx --strip-components=1 -f /tmp/apache-ant.tar.gz -C $ANT_HOME && \
rm /tmp/apache-ant.tar.gz
# Run necessary 'ant' deploy scripts
RUN ant init_installation update_configs update_code update_webapps
# Step 3 - Start up DSpace via Runnable JAR
FROM docker.io/eclipse-temurin:${JDK_VERSION}
# NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration.
ENV DSPACE_INSTALL=/dspace
# Copy the /dspace directory from 'ant_build' container to /dspace in this container
COPY --from=ant_build /dspace $DSPACE_INSTALL
WORKDIR $DSPACE_INSTALL
# Need host command for "[dspace]/bin/make-handle-config"
RUN apt-get update \
&& apt-get install -y --no-install-recommends host \
&& apt-get purge -y --auto-remove \
&& rm -rf /var/lib/apt/lists/*
# Expose Tomcat port and debugging port
EXPOSE 8080 8000
# Give java extra memory (2GB)
ENV JAVA_OPTS=-Xmx2000m
# enable JVM debugging via JDWP
ENV JAVA_TOOL_OPTIONS=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:8000
# On startup, run DSpace Runnable JAR.
# (Shell form is used so that $DSPACE_INSTALL is expanded at container startup.)
ENTRYPOINT java -jar webapps/server-boot.jar --dspace.dir=$DSPACE_INSTALL

23
LICENSE
View File

@@ -1,6 +1,7 @@
-BSD 3-Clause License
+DSpace source code license:
 
-Copyright (c) 2002-2021, LYRASIS. All rights reserved.
+Copyright (c) 2002-2010, DuraSpace. All rights reserved.
 
 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions are
@@ -13,12 +14,13 @@ notice, this list of conditions and the following disclaimer.
 notice, this list of conditions and the following disclaimer in the
 documentation and/or other materials provided with the distribution.
 
-- Neither the name of the copyright holder nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
+- Neither the name DuraSpace nor the name of the DSpace Foundation
+nor the names of its contributors may be used to endorse or promote
+products derived from this software without specific prior written
+permission.
 
 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
@@ -28,4 +30,11 @@ OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
 TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
 USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
 DAMAGE.
+
+DSpace uses third-party libraries which may be distributed under
+different licenses to the above. These licenses are located in
+the lib/licenses directory. You must agree to the terms of these
+licenses, in addition to the above DSpace source code license, in
+order to use this software.

View File

@@ -1,5 +0,0 @@
The contents of this file are subject to the license and copyright
detailed in the LICENSE and NOTICE files at the root of the source
tree and available online at
http://www.dspace.org/license/

View File

@@ -1,760 +0,0 @@
DSpace uses third-party libraries which may be distributed under different
licenses. We have listed all of these third party libraries and their licenses
below. This file can be regenerated at any time by simply running:
mvn clean verify -Dthird.party.licenses=true
You must agree to the terms of these licenses, in addition to the DSpace
source code license, in order to use this software.
---------------------------------------------------
Third party Java libraries listed by License type.
PLEASE NOTE: Some dependencies may be listed under multiple licenses if they
are dual-licensed. This is especially true of anything listed as
"GNU General Public Library" below, as DSpace actually does NOT allow for any
dependencies that are solely released under GPL terms. For more info see:
https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
---------------------------------------------------
Apache Software License, Version 2.0:
* Ant-Contrib Tasks (ant-contrib:ant-contrib:1.0b3 - http://ant-contrib.sourceforge.net)
* AWS SDK for Java - Core (com.amazonaws:aws-java-sdk-core:1.12.783 - https://aws.amazon.com/sdkforjava)
* AWS Java SDK for AWS KMS (com.amazonaws:aws-java-sdk-kms:1.12.783 - https://aws.amazon.com/sdkforjava)
* AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.12.783 - https://aws.amazon.com/sdkforjava)
* JMES Path Query library (com.amazonaws:jmespath-java:1.12.783 - https://aws.amazon.com/sdkforjava)
* Titanium JSON-LD 1.1 (JRE11) (com.apicatalog:titanium-json-ld:1.3.2 - https://github.com/filip26/titanium-json-ld)
* HPPC Collections (com.carrotsearch:hppc:0.8.1 - http://labs.carrotsearch.com/hppc.html/hppc)
* com.drewnoakes:metadata-extractor (com.drewnoakes:metadata-extractor:2.19.0 - https://drewnoakes.com/code/exif/)
* parso (com.epam:parso:2.0.14 - https://github.com/epam/parso)
* Internet Time Utility (com.ethlo.time:itu:1.7.0 - https://github.com/ethlo/itu)
* ClassMate (com.fasterxml:classmate:1.5.1 - https://github.com/FasterXML/java-classmate)
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.19.0 - https://github.com/FasterXML/jackson)
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.19.0 - https://github.com/FasterXML/jackson-core)
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.19.0 - https://github.com/FasterXML/jackson)
* Jackson dataformat: CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.17.2 - https://github.com/FasterXML/jackson-dataformats-binary)
* Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.15.2 - https://github.com/FasterXML/jackson-dataformats-binary)
* Jackson-dataformat-TOML (com.fasterxml.jackson.dataformat:jackson-dataformat-toml:2.15.2 - https://github.com/FasterXML/jackson-dataformats-text)
* Jackson-dataformat-YAML (com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.16.2 - https://github.com/FasterXML/jackson-dataformats-text)
* Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.18.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8)
* Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.19.0 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310)
* Jackson Jakarta-RS: base (com.fasterxml.jackson.jakarta.rs:jackson-jakarta-rs-base:2.16.2 - https://github.com/FasterXML/jackson-jakarta-rs-providers/jackson-jakarta-rs-base)
* Jackson Jakarta-RS: JSON (com.fasterxml.jackson.jakarta.rs:jackson-jakarta-rs-json-provider:2.16.2 - https://github.com/FasterXML/jackson-jakarta-rs-providers/jackson-jakarta-rs-json-provider)
* Jackson module: Jakarta XML Bind Annotations (jakarta.xml.bind) (com.fasterxml.jackson.module:jackson-module-jakarta-xmlbind-annotations:2.16.2 - https://github.com/FasterXML/jackson-modules-base)
* Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.18.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names)
* Java UUID Generator (com.fasterxml.uuid:java-uuid-generator:4.1.0 - https://github.com/cowtowncoder/java-uuid-generator)
* Woodstox (com.fasterxml.woodstox:woodstox-core:6.5.1 - https://github.com/FasterXML/woodstox)
* zjsonpatch (com.flipkart.zjsonpatch:zjsonpatch:0.4.16 - https://github.com/flipkart-incubator/zjsonpatch/)
* Caffeine cache (com.github.ben-manes.caffeine:caffeine:2.9.3 - https://github.com/ben-manes/caffeine)
* Caffeine cache (com.github.ben-manes.caffeine:caffeine:3.1.8 - https://github.com/ben-manes/caffeine)
* JSON.simple (com.github.cliftonlabs:json-simple:3.0.2 - https://cliftonlabs.github.io/json-simple/)
* btf (com.github.java-json-tools:btf:1.3 - https://github.com/java-json-tools/btf)
* jackson-coreutils (com.github.java-json-tools:jackson-coreutils:2.0 - https://github.com/java-json-tools/jackson-coreutils)
* jackson-coreutils-equivalence (com.github.java-json-tools:jackson-coreutils-equivalence:1.0 - https://github.com/java-json-tools/jackson-coreutils)
* json-patch (com.github.java-json-tools:json-patch:1.13 - https://github.com/java-json-tools/json-patch)
* json-schema-core (com.github.java-json-tools:json-schema-core:1.2.14 - https://github.com/java-json-tools/json-schema-core)
* json-schema-validator (com.github.java-json-tools:json-schema-validator:2.2.14 - https://github.com/java-json-tools/json-schema-validator)
* msg-simple (com.github.java-json-tools:msg-simple:1.2 - https://github.com/java-json-tools/msg-simple)
* uri-template (com.github.java-json-tools:uri-template:0.10 - https://github.com/java-json-tools/uri-template)
* JCIP Annotations under Apache License (com.github.stephenc.jcip:jcip-annotations:1.0-1 - http://stephenc.github.com/jcip-annotations)
* FindBugs-jsr305 (com.google.code.findbugs:jsr305:3.0.2 - http://findbugs.sourceforge.net/)
* Gson (com.google.code.gson:gson:2.13.1 - https://github.com/google/gson)
* error-prone annotations (com.google.errorprone:error_prone_annotations:2.38.0 - https://errorprone.info/error_prone_annotations)
* Guava InternalFutureFailureAccess and InternalFutures (com.google.guava:failureaccess:1.0.1 - https://github.com/google/guava/failureaccess)
* Guava: Google Core Libraries for Java (com.google.guava:guava:32.1.3-jre - https://github.com/google/guava)
* Guava ListenableFuture only (com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava - https://github.com/google/guava/listenablefuture)
* Google Guice - Core Library (com.google.inject:guice:7.0.0 - https://github.com/google/guice/guice)
* Google Guice - Extensions - AssistedInject (com.google.inject.extensions:guice-assistedinject:7.0.0 - https://github.com/google/guice/extensions-parent/guice-assistedinject)
* J2ObjC Annotations (com.google.j2objc:j2objc-annotations:1.3 - https://github.com/google/j2objc/)
* J2ObjC Annotations (com.google.j2objc:j2objc-annotations:2.8 - https://github.com/google/j2objc/)
* libphonenumber (com.googlecode.libphonenumber:libphonenumber:8.11.1 - https://github.com/google/libphonenumber/)
* Jackcess (com.healthmarketscience.jackcess:jackcess:4.0.8 - https://jackcess.sourceforge.io)
* Jackcess Encrypt (com.healthmarketscience.jackcess:jackcess-encrypt:4.0.3 - http://jackcessencrypt.sf.net)
* json-path (com.jayway.jsonpath:json-path:2.9.0 - https://github.com/jayway/JsonPath)
* json-path-assert (com.jayway.jsonpath:json-path-assert:2.9.0 - https://github.com/jayway/JsonPath)
* Disruptor Framework (com.lmax:disruptor:3.4.2 - http://lmax-exchange.github.com/disruptor)
* MaxMind DB Reader (com.maxmind.db:maxmind-db:2.1.0 - http://dev.maxmind.com/)
* MaxMind GeoIP2 API (com.maxmind.geoip2:geoip2:2.17.0 - https://dev.maxmind.com/geoip?lang=en)
* JsonSchemaValidator (com.networknt:json-schema-validator:1.0.76 - https://github.com/networknt/json-schema-validator)
* Nimbus JOSE+JWT (com.nimbusds:nimbus-jose-jwt:9.28 - https://bitbucket.org/connect2id/nimbus-jose-jwt)
* Nimbus JOSE+JWT (com.nimbusds:nimbus-jose-jwt:9.48 - https://bitbucket.org/connect2id/nimbus-jose-jwt)
* opencsv (com.opencsv:opencsv:5.11 - http://opencsv.sf.net)
* java-libpst (com.pff:java-libpst:0.9.3 - https://github.com/rjohnsondev/java-libpst)
* rome (com.rometools:rome:1.19.0 - http://rometools.com/rome)
* rome-modules (com.rometools:rome-modules:1.19.0 - http://rometools.com/rome-modules)
* rome-utils (com.rometools:rome-utils:1.19.0 - http://rometools.com/rome-utils)
* mockwebserver (com.squareup.okhttp3:mockwebserver:4.12.0 - https://square.github.io/okhttp/)
* okhttp (com.squareup.okhttp3:okhttp:4.12.0 - https://square.github.io/okhttp/)
* okio (com.squareup.okio:okio:3.6.0 - https://github.com/square/okio/)
* okio (com.squareup.okio:okio-jvm:3.6.0 - https://github.com/square/okio/)
* T-Digest (com.tdunning:t-digest:3.1 - https://github.com/tdunning/t-digest)
* config (com.typesafe:config:1.3.3 - https://github.com/lightbend/config)
* ssl-config-core (com.typesafe:ssl-config-core_2.13:0.3.8 - https://github.com/lightbend/ssl-config)
* akka-actor (com.typesafe.akka:akka-actor_2.13:2.5.31 - https://akka.io/)
* akka-http-core (com.typesafe.akka:akka-http-core_2.13:10.1.12 - https://akka.io)
* akka-http (com.typesafe.akka:akka-http_2.13:10.1.12 - https://akka.io)
* akka-parsing (com.typesafe.akka:akka-parsing_2.13:10.1.12 - https://akka.io)
* akka-protobuf (com.typesafe.akka:akka-protobuf_2.13:2.5.31 - https://akka.io/)
* akka-stream (com.typesafe.akka:akka-stream_2.13:2.5.31 - https://akka.io/)
* scala-logging (com.typesafe.scala-logging:scala-logging_2.13:3.9.2 - https://github.com/lightbend/scala-logging)
* JSON library from Android SDK (com.vaadin.external.google:android-json:0.0.20131108.vaadin1 - http://developer.android.com/sdk)
* SparseBitSet (com.zaxxer:SparseBitSet:1.3 - https://github.com/brettwooldridge/SparseBitSet)
* Apache Commons BeanUtils (commons-beanutils:commons-beanutils:1.10.1 - https://commons.apache.org/proper/commons-beanutils)
* Apache Commons CLI (commons-cli:commons-cli:1.9.0 - https://commons.apache.org/proper/commons-cli/)
* Apache Commons Codec (commons-codec:commons-codec:1.18.0 - https://commons.apache.org/proper/commons-codec/)
* Apache Commons Collections (commons-collections:commons-collections:3.2.2 - http://commons.apache.org/collections/)
* Commons Digester (commons-digester:commons-digester:2.1 - http://commons.apache.org/digester/)
* Apache Commons IO (commons-io:commons-io:2.19.0 - https://commons.apache.org/proper/commons-io/)
* Commons Lang (commons-lang:commons-lang:2.6 - http://commons.apache.org/lang/)
* Apache Commons Logging (commons-logging:commons-logging:1.3.5 - https://commons.apache.org/proper/commons-logging/)
* Apache Commons Validator (commons-validator:commons-validator:1.9.0 - http://commons.apache.org/proper/commons-validator/)
* GeoJson POJOs for Jackson (de.grundid.opendatalab:geojson-jackson:1.14 - https://github.com/opendatalab-de/geojson-jackson)
* broker-client (eu.openaire:broker-client:1.1.2 - http://api.openaire.eu/broker/broker-client)
* OpenAIRE Funders Model (eu.openaire:funders-model:2.0.0 - https://api.openaire.eu)
* Metrics Core (io.dropwizard.metrics:metrics-core:4.1.5 - https://metrics.dropwizard.io/metrics-core)
* Metrics Core (io.dropwizard.metrics:metrics-core:4.2.25 - https://metrics.dropwizard.io/metrics-core)
* Graphite Integration for Metrics (io.dropwizard.metrics:metrics-graphite:4.1.5 - https://metrics.dropwizard.io/metrics-graphite)
* Metrics Integration for Jetty 9.3 and higher (io.dropwizard.metrics:metrics-jetty9:4.1.5 - https://metrics.dropwizard.io/metrics-jetty9)
* Metrics Integration with JMX (io.dropwizard.metrics:metrics-jmx:4.1.5 - https://metrics.dropwizard.io/metrics-jmx)
* JVM Integration for Metrics (io.dropwizard.metrics:metrics-jvm:4.1.5 - https://metrics.dropwizard.io/metrics-jvm)
* SWORD v2 Common Server Library (forked) (io.gdcc:sword2-server:2.0.0 - https://github.com/gdcc/sword2-server)
* micrometer-commons (io.micrometer:micrometer-commons:1.14.6 - https://github.com/micrometer-metrics/micrometer)
* micrometer-commons (io.micrometer:micrometer-commons:1.14.7 - https://github.com/micrometer-metrics/micrometer)
* micrometer-core (io.micrometer:micrometer-core:1.14.6 - https://github.com/micrometer-metrics/micrometer)
* micrometer-jakarta9 (io.micrometer:micrometer-jakarta9:1.14.6 - https://github.com/micrometer-metrics/micrometer)
* micrometer-observation (io.micrometer:micrometer-observation:1.14.6 - https://github.com/micrometer-metrics/micrometer)
* micrometer-observation (io.micrometer:micrometer-observation:1.14.7 - https://github.com/micrometer-metrics/micrometer)
* Netty/Buffer (io.netty:netty-buffer:4.1.99.Final - https://netty.io/netty-buffer/)
* Netty/Codec (io.netty:netty-codec:4.1.99.Final - https://netty.io/netty-codec/)
* Netty/Codec/HTTP (io.netty:netty-codec-http:4.1.86.Final - https://netty.io/netty-codec-http/)
* Netty/Codec/HTTP2 (io.netty:netty-codec-http2:4.1.86.Final - https://netty.io/netty-codec-http2/)
* Netty/Codec/Socks (io.netty:netty-codec-socks:4.1.86.Final - https://netty.io/netty-codec-socks/)
* Netty/Common (io.netty:netty-common:4.1.99.Final - https://netty.io/netty-common/)
* Netty/Handler (io.netty:netty-handler:4.1.99.Final - https://netty.io/netty-handler/)
* Netty/Handler/Proxy (io.netty:netty-handler-proxy:4.1.86.Final - https://netty.io/netty-handler-proxy/)
* Netty/Resolver (io.netty:netty-resolver:4.1.99.Final - https://netty.io/netty-resolver/)
* Netty/TomcatNative [BoringSSL - Static] (io.netty:netty-tcnative-boringssl-static:2.0.56.Final - https://github.com/netty/netty-tcnative/netty-tcnative-boringssl-static/)
* Netty/TomcatNative [OpenSSL - Classes] (io.netty:netty-tcnative-classes:2.0.56.Final - https://github.com/netty/netty-tcnative/netty-tcnative-classes/)
* Netty/Transport (io.netty:netty-transport:4.1.99.Final - https://netty.io/netty-transport/)
* Netty/Transport/Classes/Epoll (io.netty:netty-transport-classes-epoll:4.1.99.Final - https://netty.io/netty-transport-classes-epoll/)
* Netty/Transport/Native/Epoll (io.netty:netty-transport-native-epoll:4.1.99.Final - https://netty.io/netty-transport-native-epoll/)
* Netty/Transport/Native/Unix/Common (io.netty:netty-transport-native-unix-common:4.1.99.Final - https://netty.io/netty-transport-native-unix-common/)
* OpenTracing API (io.opentracing:opentracing-api:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-api)
* OpenTracing-noop (io.opentracing:opentracing-noop:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-noop)
* OpenTracing-util (io.opentracing:opentracing-util:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-util)
* Prometheus Java Simpleclient (io.prometheus:simpleclient:0.16.0 - http://github.com/prometheus/client_java/simpleclient)
* Prometheus Java Simpleclient Common (io.prometheus:simpleclient_common:0.16.0 - http://github.com/prometheus/client_java/simpleclient_common)
* Prometheus Java Simpleclient Httpserver (io.prometheus:simpleclient_httpserver:0.16.0 - http://github.com/prometheus/client_java/simpleclient_httpserver)
* Prometheus Java Span Context Supplier - Common (io.prometheus:simpleclient_tracer_common:0.16.0 - http://github.com/prometheus/client_java/simpleclient_tracer/simpleclient_tracer_common)
* Prometheus Java Span Context Supplier - OpenTelemetry (io.prometheus:simpleclient_tracer_otel:0.16.0 - http://github.com/prometheus/client_java/simpleclient_tracer/simpleclient_tracer_otel)
* Prometheus Java Span Context Supplier - OpenTelemetry Agent (io.prometheus:simpleclient_tracer_otel_agent:0.16.0 - http://github.com/prometheus/client_java/simpleclient_tracer/simpleclient_tracer_otel_agent)
* Google S2 geometry library (io.sgr:s2-geometry-library-java:1.0.0 - https://github.com/sgr-io/s2-geometry-library-java)
* Jandex: Core (io.smallrye:jandex:3.1.2 - https://smallrye.io)
* swagger-annotations (io.swagger:swagger-annotations:1.6.9 - https://github.com/swagger-api/swagger-core/modules/swagger-annotations)
* swagger-compat-spec-parser (io.swagger:swagger-compat-spec-parser:1.0.64 - http://nexus.sonatype.org/oss-repository-hosting.html/swagger-parser-project/modules/swagger-compat-spec-parser)
* swagger-core (io.swagger:swagger-core:1.6.9 - https://github.com/swagger-api/swagger-core/modules/swagger-core)
* swagger-models (io.swagger:swagger-models:1.6.9 - https://github.com/swagger-api/swagger-core/modules/swagger-models)
* swagger-parser (io.swagger:swagger-parser:1.0.64 - http://nexus.sonatype.org/oss-repository-hosting.html/swagger-parser-project/modules/swagger-parser)
* swagger-annotations (io.swagger.core.v3:swagger-annotations:2.2.8 - https://github.com/swagger-api/swagger-core/modules/swagger-annotations)
* swagger-annotations-jakarta (io.swagger.core.v3:swagger-annotations-jakarta:2.2.21 - https://github.com/swagger-api/swagger-core/modules/swagger-annotations-jakarta)
* swagger-core (io.swagger.core.v3:swagger-core:2.2.8 - https://github.com/swagger-api/swagger-core/modules/swagger-core)
* swagger-core-jakarta (io.swagger.core.v3:swagger-core-jakarta:2.2.21 - https://github.com/swagger-api/swagger-core/modules/swagger-core-jakarta)
* swagger-integration-jakarta (io.swagger.core.v3:swagger-integration-jakarta:2.2.21 - https://github.com/swagger-api/swagger-core/modules/swagger-integration-jakarta)
* swagger-jaxrs2-jakarta (io.swagger.core.v3:swagger-jaxrs2-jakarta:2.2.21 - https://github.com/swagger-api/swagger-core/modules/swagger-jaxrs2-jakarta)
* swagger-models (io.swagger.core.v3:swagger-models:2.2.8 - https://github.com/swagger-api/swagger-core/modules/swagger-models)
* swagger-models-jakarta (io.swagger.core.v3:swagger-models-jakarta:2.2.21 - https://github.com/swagger-api/swagger-core/modules/swagger-models-jakarta)
* swagger-parser (io.swagger.parser.v3:swagger-parser:2.1.10 - http://nexus.sonatype.org/oss-repository-hosting.html/swagger-parser-project/modules/swagger-parser)
* swagger-parser (io.swagger.parser.v3:swagger-parser-core:2.1.10 - http://nexus.sonatype.org/oss-repository-hosting.html/swagger-parser-project/modules/swagger-parser-core)
* swagger-parser-v2-converter (io.swagger.parser.v3:swagger-parser-v2-converter:2.1.10 - http://nexus.sonatype.org/oss-repository-hosting.html/swagger-parser-project/modules/swagger-parser-v2-converter)
* swagger-parser-v3 (io.swagger.parser.v3:swagger-parser-v3:2.1.10 - http://nexus.sonatype.org/oss-repository-hosting.html/swagger-parser-project/modules/swagger-parser-v3)
* Jakarta Dependency Injection (jakarta.inject:jakarta.inject-api:2.0.1 - https://github.com/eclipse-ee4j/injection-api)
* Jakarta Bean Validation API (jakarta.validation:jakarta.validation-api:3.0.2 - https://beanvalidation.org)
* JSR107 API and SPI (javax.cache:cache-api:1.1.1 - https://github.com/jsr107/jsr107spec)
* jdbm (jdbm:jdbm:1.0 - no url defined)
* Joda-Time (joda-time:joda-time:2.12.7 - https://www.joda.org/joda-time/)
* Byte Buddy (without dependencies) (net.bytebuddy:byte-buddy:1.11.13 - https://bytebuddy.net/byte-buddy)
* Byte Buddy (without dependencies) (net.bytebuddy:byte-buddy:1.14.11 - https://bytebuddy.net/byte-buddy)
* Byte Buddy agent (net.bytebuddy:byte-buddy-agent:1.11.13 - https://bytebuddy.net/byte-buddy-agent)
* eigenbase-properties (net.hydromatic:eigenbase-properties:1.1.5 - http://github.com/julianhyde/eigenbase-properties)
* json-unit-core (net.javacrumbs.json-unit:json-unit-core:2.36.0 - https://github.com/lukas-krecan/JsonUnit/json-unit-core)
* "Java Concurrency in Practice" book annotations (net.jcip:jcip-annotations:1.0 - http://jcip.net/)
* ASM based accessors helper used by json-smart (net.minidev:accessors-smart:2.5.0 - https://urielch.github.io/)
* ASM based accessors helper used by json-smart (net.minidev:accessors-smart:2.5.2 - https://urielch.github.io/)
* JSON Small and Fast Parser (net.minidev:json-smart:2.5.0 - https://urielch.github.io/)
* JSON Small and Fast Parser (net.minidev:json-smart:2.5.2 - https://urielch.github.io/)
* java-support (net.shibboleth.utilities:java-support:8.4.2 - http://shibboleth.net/java-support/)
* OGNL - Object Graph Navigation Library (ognl:ognl:3.3.4 - https://github.com/jkuhnert/ognl/)
* Abdera Core (org.apache.abdera:abdera-core:1.1.3 - http://abdera.apache.org/abdera-core)
* I18N Libraries (org.apache.abdera:abdera-i18n:1.1.3 - http://abdera.apache.org)
* Abdera Parser (org.apache.abdera:abdera-parser:1.1.3 - http://abdera.apache.org/abdera-parser)
* Apache Ant Core (org.apache.ant:ant:1.10.15 - https://ant.apache.org/)
* Apache Ant Launcher (org.apache.ant:ant-launcher:1.10.15 - https://ant.apache.org/)
* Apache Commons BCEL (org.apache.bcel:bcel:6.10.0 - https://commons.apache.org/proper/commons-bcel)
* Calcite Core (org.apache.calcite:calcite-core:1.35.0 - https://calcite.apache.org)
* Calcite Linq4j (org.apache.calcite:calcite-linq4j:1.35.0 - https://calcite.apache.org)
* Apache Calcite Avatica (org.apache.calcite.avatica:avatica-core:1.23.0 - https://calcite.apache.org/avatica)
* Apache Calcite Avatica Metrics (org.apache.calcite.avatica:avatica-metrics:1.23.0 - https://calcite.apache.org/avatica)
* Apache Commons Collections (org.apache.commons:commons-collections4:4.5.0 - https://commons.apache.org/proper/commons-collections/)
* Apache Commons Compress (org.apache.commons:commons-compress:1.27.1 - https://commons.apache.org/proper/commons-compress/)
* Apache Commons Configuration (org.apache.commons:commons-configuration2:2.12.0 - https://commons.apache.org/proper/commons-configuration/)
* Apache Commons CSV (org.apache.commons:commons-csv:1.14.0 - https://commons.apache.org/proper/commons-csv/)
* Apache Commons DBCP (org.apache.commons:commons-dbcp2:2.13.0 - https://commons.apache.org/proper/commons-dbcp/)
* Apache Commons Digester (org.apache.commons:commons-digester3:3.2 - http://commons.apache.org/digester/)
* Apache Commons Exec (org.apache.commons:commons-exec:1.3 - http://commons.apache.org/proper/commons-exec/)
* Apache Commons Exec (org.apache.commons:commons-exec:1.4.0 - https://commons.apache.org/proper/commons-exec/)
* Apache Commons Lang (org.apache.commons:commons-lang3:3.17.0 - https://commons.apache.org/proper/commons-lang/)
* Apache Commons Math (org.apache.commons:commons-math3:3.6.1 - http://commons.apache.org/proper/commons-math/)
* Apache Commons Pool (org.apache.commons:commons-pool2:2.12.1 - https://commons.apache.org/proper/commons-pool/)
* Apache Commons Text (org.apache.commons:commons-text:1.13.1 - https://commons.apache.org/proper/commons-text)
* Curator Client (org.apache.curator:curator-client:2.13.0 - http://curator.apache.org/curator-client)
* Curator Framework (org.apache.curator:curator-framework:2.13.0 - http://curator.apache.org/curator-framework)
* Curator Recipes (org.apache.curator:curator-recipes:2.13.0 - http://curator.apache.org/curator-recipes)
* Apache Hadoop Annotations (org.apache.hadoop:hadoop-annotations:3.2.4 - no url defined)
* Apache Hadoop Auth (org.apache.hadoop:hadoop-auth:3.2.4 - no url defined)
* Apache Hadoop Common (org.apache.hadoop:hadoop-common:3.2.4 - no url defined)
* Apache Hadoop HDFS Client (org.apache.hadoop:hadoop-hdfs-client:3.2.4 - no url defined)
* htrace-core4 (org.apache.htrace:htrace-core4:4.1.0-incubating - http://incubator.apache.org/projects/htrace.html)
* Apache HttpClient (org.apache.httpcomponents:httpclient:4.5.14 - http://hc.apache.org/httpcomponents-client-ga)
* Apache HttpClient Cache (org.apache.httpcomponents:httpclient-cache:4.5.14 - http://hc.apache.org/httpcomponents-client-ga)
* Apache HttpCore (org.apache.httpcomponents:httpcore:4.4.16 - http://hc.apache.org/httpcomponents-core-ga)
* Apache HttpClient Mime (org.apache.httpcomponents:httpmime:4.5.14 - http://hc.apache.org/httpcomponents-client-ga)
* Apache HttpClient (org.apache.httpcomponents.client5:httpclient5:5.1.3 - https://hc.apache.org/httpcomponents-client-5.0.x/5.1.3/httpclient5/)
* Apache HttpClient (org.apache.httpcomponents.client5:httpclient5:5.4.4 - https://hc.apache.org/httpcomponents-client-5.4.x/5.4.4/httpclient5/)
* Apache HttpComponents Core HTTP/1.1 (org.apache.httpcomponents.core5:httpcore5:5.1.3 - https://hc.apache.org/httpcomponents-core-5.1.x/5.1.3/httpcore5/)
* Apache HttpComponents Core HTTP/1.1 (org.apache.httpcomponents.core5:httpcore5:5.3.4 - https://hc.apache.org/httpcomponents-core-5.3.x/5.3.4/httpcore5/)
* Apache HttpComponents Core HTTP/2 (org.apache.httpcomponents.core5:httpcore5-h2:5.1.3 - https://hc.apache.org/httpcomponents-core-5.1.x/5.1.3/httpcore5-h2/)
* Apache HttpComponents Core HTTP/2 (org.apache.httpcomponents.core5:httpcore5-h2:5.3.4 - https://hc.apache.org/httpcomponents-core-5.3.x/5.3.4/httpcore5-h2/)
* Apache James :: Mime4j :: Core (org.apache.james:apache-mime4j-core:0.8.12 - http://james.apache.org/mime4j/apache-mime4j-core)
* Apache James :: Mime4j :: DOM (org.apache.james:apache-mime4j-dom:0.8.12 - http://james.apache.org/mime4j/apache-mime4j-dom)
* jclouds blobstore core (org.apache.jclouds:jclouds-blobstore:2.7.0 - https://jclouds.apache.org/jclouds-blobstore/)
* jclouds Components Core (org.apache.jclouds:jclouds-core:2.7.0 - https://jclouds.apache.org/jclouds-core/)
* jclouds filesystem core (org.apache.jclouds.api:filesystem:2.7.0 - https://jclouds.apache.org/filesystem/)
* jclouds s3 api (org.apache.jclouds.api:s3:2.7.0 - https://jclouds.apache.org/s3/)
* jclouds sts api (org.apache.jclouds.api:sts:2.7.0 - https://jclouds.apache.org/sts/)
* jclouds Amazon Simple Storage Service (S3) provider (org.apache.jclouds.provider:aws-s3:2.7.0 - https://jclouds.apache.org/aws-s3/)
* Apache Jena - Libraries POM (org.apache.jena:apache-jena-libs:4.10.0 - https://jena.apache.org/apache-jena-libs/)
* Apache Jena - ARQ (org.apache.jena:jena-arq:4.10.0 - https://jena.apache.org/jena-arq/)
* Apache Jena - Base (org.apache.jena:jena-base:4.10.0 - https://jena.apache.org/jena-base/)
* Apache Jena - Core (org.apache.jena:jena-core:4.10.0 - https://jena.apache.org/jena-core/)
* Apache Jena - DBOE Base (org.apache.jena:jena-dboe-base:4.10.0 - https://jena.apache.org/jena-dboe-base/)
* Apache Jena - DBOE Indexes (org.apache.jena:jena-dboe-index:4.10.0 - https://jena.apache.org/jena-dboe-index/)
* Apache Jena - DBOE Storage (org.apache.jena:jena-dboe-storage:4.10.0 - https://jena.apache.org/jena-dboe-storage/)
* Apache Jena - DBOE Transactional Datastructures (org.apache.jena:jena-dboe-trans-data:4.10.0 - https://jena.apache.org/jena-dboe-trans-data/)
* Apache Jena - DBOE Transactions (org.apache.jena:jena-dboe-transaction:4.10.0 - https://jena.apache.org/jena-dboe-transaction/)
* Apache Jena - IRI (org.apache.jena:jena-iri:4.10.0 - https://jena.apache.org/jena-iri/)
* Apache Jena - RDF Connection (org.apache.jena:jena-rdfconnection:4.10.0 - https://jena.apache.org/jena-rdfconnection/)
* Apache Jena - RDF Patch (org.apache.jena:jena-rdfpatch:4.10.0 - https://jena.apache.org/jena-rdfpatch/)
* Apache Jena - SHACL (org.apache.jena:jena-shacl:4.10.0 - https://jena.apache.org/jena-shacl/)
* Apache Jena - ShEx (org.apache.jena:jena-shex:4.10.0 - https://jena.apache.org/jena-shex/)
* Apache Jena - TDB1 (Native Triple Store) (org.apache.jena:jena-tdb:4.10.0 - https://jena.apache.org/jena-tdb/)
* Apache Jena - TDB2 (Native Triple Store) (org.apache.jena:jena-tdb2:4.10.0 - https://jena.apache.org/jena-tdb2/)
* Kerby-kerb core (org.apache.kerby:kerb-core:1.0.1 - http://directory.apache.org/kerby/kerby-kerb/kerb-core)
* Kerby-kerb Util (org.apache.kerby:kerb-util:1.0.1 - http://directory.apache.org/kerby/kerby-kerb/kerb-util)
* Kerby ASN1 Project (org.apache.kerby:kerby-asn1:1.0.1 - http://directory.apache.org/kerby/kerby-common/kerby-asn1)
* Kerby PKIX Project (org.apache.kerby:kerby-pkix:1.0.1 - http://directory.apache.org/kerby/kerby-pkix)
* Apache Log4j 1.x Compatibility API (org.apache.logging.log4j:log4j-1.2-api:2.17.2 - https://logging.apache.org/log4j/2.x/log4j-1.2-api/)
* Apache Log4j API (org.apache.logging.log4j:log4j-api:2.24.3 - https://logging.apache.org/log4j/2.x/log4j/log4j-api/)
* Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.24.3 - https://logging.apache.org/log4j/2.x/log4j/log4j-core/)
* Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.24.3 - https://logging.apache.org/log4j/2.x/log4j/log4j-jul/)
* Apache Log4j Layout for JSON template (org.apache.logging.log4j:log4j-layout-template-json:2.17.2 - https://logging.apache.org/log4j/2.x/log4j-layout-template-json/)
* Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.17.2 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/)
* SLF4J 2 Provider for Log4j API (org.apache.logging.log4j:log4j-slf4j2-impl:2.24.3 - https://logging.apache.org/log4j/2.x/log4j/log4j-slf4j2-impl/)
* Apache Log4j Web (org.apache.logging.log4j:log4j-web:2.17.2 - https://logging.apache.org/log4j/2.x/log4j-web/)
* Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-analyzers-common)
* Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-analyzers-icu)
* Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji)
* Lucene Nori Korean Morphological Analyzer (org.apache.lucene:lucene-analyzers-nori:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-analyzers-nori)
* Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic)
* Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn)
* Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-analyzers-stempel)
* Lucene Memory (org.apache.lucene:lucene-backward-codecs:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-backward-codecs)
* Lucene Classification (org.apache.lucene:lucene-classification:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-classification)
* Lucene codecs (org.apache.lucene:lucene-codecs:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-codecs)
* Lucene Core (org.apache.lucene:lucene-core:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-core)
* Lucene Expressions (org.apache.lucene:lucene-expressions:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-expressions)
* Lucene Grouping (org.apache.lucene:lucene-grouping:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-grouping)
* Lucene Highlighter (org.apache.lucene:lucene-highlighter:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-highlighter)
* Lucene Join (org.apache.lucene:lucene-join:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-join)
* Lucene Memory (org.apache.lucene:lucene-memory:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-memory)
* Lucene Miscellaneous (org.apache.lucene:lucene-misc:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-misc)
* Lucene Queries (org.apache.lucene:lucene-queries:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-queries)
* Lucene QueryParsers (org.apache.lucene:lucene-queryparser:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-queryparser)
* Lucene Sandbox (org.apache.lucene:lucene-sandbox:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-sandbox)
* Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras)
* Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-spatial3d)
* Lucene Suggest (org.apache.lucene:lucene-suggest:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-suggest)
* Apache FontBox (org.apache.pdfbox:fontbox:2.0.34 - http://pdfbox.apache.org/)
* PDFBox JBIG2 ImageIO plugin (org.apache.pdfbox:jbig2-imageio:3.0.4 - https://www.apache.org/jbig2-imageio/)
* Apache JempBox (org.apache.pdfbox:jempbox:1.8.17 - http://www.apache.org/pdfbox-parent/jempbox/)
* Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.34 - https://www.apache.org/pdfbox-parent/pdfbox/)
* Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.34 - https://www.apache.org/pdfbox-parent/pdfbox-tools/)
* Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.34 - https://www.apache.org/pdfbox-parent/xmpbox/)
* Apache POI - Common (org.apache.poi:poi:5.4.1 - https://poi.apache.org/)
* Apache POI - API based on OPC and OOXML schemas (org.apache.poi:poi-ooxml:5.4.1 - https://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml-lite:5.4.1 - https://poi.apache.org/)
* Apache POI (org.apache.poi:poi-scratchpad:5.4.1 - https://poi.apache.org/)
* Apache XML Security for Java (org.apache.santuario:xmlsec:2.3.4 - https://santuario.apache.org/)
* Apache Solr Core (org.apache.solr:solr-core:8.11.4 - https://lucene.apache.org/solr-parent/solr-core)
* Apache Solr Solrj (org.apache.solr:solr-solrj:8.11.4 - https://lucene.apache.org/solr-parent/solr-solrj)
* Apache Standard Taglib Implementation (org.apache.taglibs:taglibs-standard-impl:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-impl)
* Apache Standard Taglib Specification API (org.apache.taglibs:taglibs-standard-spec:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-spec)
* Apache Thrift (org.apache.thrift:libthrift:0.19.0 - http://thrift.apache.org)
* Apache Tika core (org.apache.tika:tika-core:2.9.4 - https://tika.apache.org/)
* Apache Tika Apple parser module (org.apache.tika:tika-parser-apple-module:2.9.4 - https://tika.apache.org/tika-parser-apple-module/)
* Apache Tika audiovideo parser module (org.apache.tika:tika-parser-audiovideo-module:2.9.4 - https://tika.apache.org/tika-parser-audiovideo-module/)
* Apache Tika cad parser module (org.apache.tika:tika-parser-cad-module:2.9.4 - https://tika.apache.org/tika-parser-cad-module/)
* Apache Tika code parser module (org.apache.tika:tika-parser-code-module:2.9.4 - https://tika.apache.org/tika-parser-code-module/)
* Apache Tika crypto parser module (org.apache.tika:tika-parser-crypto-module:2.9.4 - https://tika.apache.org/tika-parser-crypto-module/)
* Apache Tika digest commons (org.apache.tika:tika-parser-digest-commons:2.9.4 - https://tika.apache.org/tika-parser-digest-commons/)
* Apache Tika font parser module (org.apache.tika:tika-parser-font-module:2.9.4 - https://tika.apache.org/tika-parser-font-module/)
* Apache Tika html parser module (org.apache.tika:tika-parser-html-module:2.9.4 - https://tika.apache.org/tika-parser-html-module/)
* Apache Tika image parser module (org.apache.tika:tika-parser-image-module:2.9.4 - https://tika.apache.org/tika-parser-image-module/)
* Apache Tika mail commons (org.apache.tika:tika-parser-mail-commons:2.9.4 - https://tika.apache.org/tika-parser-mail-commons/)
* Apache Tika mail parser module (org.apache.tika:tika-parser-mail-module:2.9.4 - https://tika.apache.org/tika-parser-mail-module/)
* Apache Tika Microsoft parser module (org.apache.tika:tika-parser-microsoft-module:2.9.4 - https://tika.apache.org/tika-parser-microsoft-module/)
* Apache Tika miscellaneous office format parser module (org.apache.tika:tika-parser-miscoffice-module:2.9.4 - https://tika.apache.org/tika-parser-miscoffice-module/)
* Apache Tika news parser module (org.apache.tika:tika-parser-news-module:2.9.4 - https://tika.apache.org/tika-parser-news-module/)
* Apache Tika OCR parser module (org.apache.tika:tika-parser-ocr-module:2.9.4 - https://tika.apache.org/tika-parser-ocr-module/)
* Apache Tika PDF parser module (org.apache.tika:tika-parser-pdf-module:2.9.4 - https://tika.apache.org/tika-parser-pdf-module/)
* Apache Tika package parser module (org.apache.tika:tika-parser-pkg-module:2.9.4 - https://tika.apache.org/tika-parser-pkg-module/)
* Apache Tika text parser module (org.apache.tika:tika-parser-text-module:2.9.4 - https://tika.apache.org/tika-parser-text-module/)
* Apache Tika WARC parser module (org.apache.tika:tika-parser-webarchive-module:2.9.4 - https://tika.apache.org/tika-parser-webarchive-module/)
* Apache Tika XML parser module (org.apache.tika:tika-parser-xml-module:2.9.4 - https://tika.apache.org/tika-parser-xml-module/)
* Apache Tika XMP commons (org.apache.tika:tika-parser-xmp-commons:2.9.4 - https://tika.apache.org/tika-parser-xmp-commons/)
* Apache Tika ZIP commons (org.apache.tika:tika-parser-zip-commons:2.9.4 - https://tika.apache.org/tika-parser-zip-commons/)
* Apache Tika standard parser package (org.apache.tika:tika-parsers-standard-package:2.9.4 - https://tika.apache.org/tika-parsers/tika-parsers-standard/tika-parsers-standard-package/)
* tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:10.1.40 - https://tomcat.apache.org/)
* tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:10.1.40 - https://tomcat.apache.org/)
* tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:10.1.40 - https://tomcat.apache.org/)
* Apache Velocity - Engine (org.apache.velocity:velocity-engine-core:2.4.1 - http://velocity.apache.org/engine/devel/velocity-engine-core/)
* Apache Velocity - JSR 223 Scripting (org.apache.velocity:velocity-engine-scripting:2.3 - http://velocity.apache.org/engine/devel/velocity-engine-scripting/)
* Apache Velocity Tools - Generic tools (org.apache.velocity.tools:velocity-tools-generic:3.1 - https://velocity.apache.org/tools/devel/velocity-tools-generic/)
* Axiom API (org.apache.ws.commons.axiom:axiom-api:1.2.14 - http://ws.apache.org/axiom/)
* Axiom Impl (org.apache.ws.commons.axiom:axiom-impl:1.2.14 - http://ws.apache.org/axiom/)
* XmlBeans (org.apache.xmlbeans:xmlbeans:5.3.0 - https://xmlbeans.apache.org/)
* Apache ZooKeeper - Server (org.apache.zookeeper:zookeeper:3.6.2 - http://zookeeper.apache.org/zookeeper)
* Apache ZooKeeper - Jute (org.apache.zookeeper:zookeeper-jute:3.6.2 - http://zookeeper.apache.org/zookeeper-jute)
* org.apiguardian:apiguardian-api (org.apiguardian:apiguardian-api:1.1.2 - https://github.com/apiguardian-team/apiguardian)
* AssertJ Core (org.assertj:assertj-core:3.26.3 - https://assertj.github.io/doc/#assertj-core)
* Evo Inflector (org.atteo:evo-inflector:1.3 - http://atteo.org/static/evo-inflector)
* attoparser (org.attoparser:attoparser:2.0.7.RELEASE - https://www.attoparser.org)
* Awaitility (org.awaitility:awaitility:4.2.2 - http://awaitility.org)
* jose4j (org.bitbucket.b_c:jose4j:0.6.5 - https://bitbucket.org/b_c/jose4j/)
* TagSoup (org.ccil.cowan.tagsoup:tagsoup:1.2.1 - http://home.ccil.org/~cowan/XML/tagsoup/)
* Woodstox (org.codehaus.woodstox:wstx-asl:3.2.6 - http://woodstox.codehaus.org)
* Cryptacular Library (org.cryptacular:cryptacular:1.2.5 - http://www.cryptacular.org)
* jems (org.dmfs:jems:1.18 - https://github.com/dmfs/jems)
* rfc3986-uri (org.dmfs:rfc3986-uri:0.8.1 - https://github.com/dmfs/uri-toolkit)
* Jetty :: Apache JSP Implementation (org.eclipse.jetty:apache-jsp:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/)
* Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client)
* Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client)
* Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.15.v20190215 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server)
* Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server)
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.15.v20190215 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
* Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-client)
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-continuation)
* Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.57.v20241219 - https://jetty.org/jetty-deploy/)
* Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.57.v20241219 - https://jetty.org/jetty-http/)
* Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.57.v20241219 - https://jetty.org/jetty-io/)
* Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-jmx)
* Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-rewrite)
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-security)
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.57.v20241219 - https://jetty.org/jetty-security/)
* Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.57.v20241219 - https://jetty.org/jetty-server/)
* Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.57.v20241219 - https://jetty.org/jetty-servlet/)
* Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-servlets)
* Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.57.v20241219 - https://jetty.org/jetty-util/)
* Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.57.v20241219 - https://jetty.org/jetty-util-ajax/)
* Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.57.v20241219 - https://jetty.org/jetty-webapp/)
* Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-xml)
* Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.57.v20241219 - https://jetty.org/jetty-xml/)
* Jetty :: ALPN :: API (org.eclipse.jetty.alpn:alpn-api:1.1.3.v20160715 - http://www.eclipse.org/jetty/alpn-api)
* Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.53.v20231009 - https://eclipse.org/jetty/http2-parent/http2-client)
* Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.57.v20241219 - https://jetty.org/http2-parent/http2-common/)
* Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.53.v20231009 - https://eclipse.org/jetty/http2-parent/http2-hpack)
* Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.53.v20231009 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport)
* Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.15.v20190215 - https://eclipse.org/jetty/http2-parent/http2-server)
* Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.53.v20231009 - https://eclipse.org/jetty/http2-parent/http2-server)
* Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas)
* Ehcache (org.ehcache:ehcache:3.10.8 - http://ehcache.org)
* flyway-core (org.flywaydb:flyway-core:10.22.0 - https://flywaydb.org/flyway-core)
* flyway-database-postgresql (org.flywaydb:flyway-database-postgresql:10.22.0 - https://flywaydb.org/flyway-database-postgresql)
* Ogg and Vorbis for Java, Core (org.gagravarr:vorbis-java-core:0.8 - https://github.com/Gagravarr/VorbisJava)
* Apache Tika plugin for Ogg, Vorbis and FLAC (org.gagravarr:vorbis-java-tika:0.8 - https://github.com/Gagravarr/VorbisJava)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-core-common (org.glassfish.jersey.core:jersey-common:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* jersey-media-multipart (org.glassfish.jersey.media:jersey-media-multipart:3.1.3 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-media-multipart)
* Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:8.0.1.Final - http://hibernate.org/validator/hibernate-validator)
* Hibernate Validator Portable Extension (org.hibernate.validator:hibernate-validator-cdi:8.0.1.Final - http://hibernate.org/validator/hibernate-validator-cdi)
* org.immutables.value-annotations (org.immutables:value-annotations:2.9.2 - http://immutables.org/value-annotations)
* leveldb (org.iq80.leveldb:leveldb:0.12 - http://github.com/dain/leveldb/leveldb)
* leveldb-api (org.iq80.leveldb:leveldb-api:0.12 - http://github.com/dain/leveldb/leveldb-api)
* Javassist (org.javassist:javassist:3.30.2-GA - https://www.javassist.org/)
* JBoss Logging 3 (org.jboss.logging:jboss-logging:3.6.1.Final - http://www.jboss.org)
* JDOM (org.jdom:jdom2:2.0.6.1 - http://www.jdom.org)
* IntelliJ IDEA Annotations (org.jetbrains:annotations:13.0 - http://www.jetbrains.org)
* Kotlin Stdlib (org.jetbrains.kotlin:kotlin-stdlib:1.8.21 - https://kotlinlang.org/)
* Kotlin Stdlib Common (org.jetbrains.kotlin:kotlin-stdlib-common:1.8.21 - https://kotlinlang.org/)
* Kotlin Stdlib Jdk7 (org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.8.21 - https://kotlinlang.org/)
* Kotlin Stdlib Jdk8 (org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.8.21 - https://kotlinlang.org/)
* Proj4J (org.locationtech.proj4j:proj4j:1.1.5 - https://github.com/locationtech/proj4j)
* Spatial4J (org.locationtech.spatial4j:spatial4j:0.7 - https://projects.eclipse.org/projects/locationtech.spatial4j)
* MockServer Java Client (org.mock-server:mockserver-client-java:5.15.0 - https://www.mock-server.com)
* MockServer Core (org.mock-server:mockserver-core:5.15.0 - https://www.mock-server.com)
* MockServer JUnit 4 Integration (org.mock-server:mockserver-junit-rule:5.15.0 - https://www.mock-server.com)
* MockServer & Proxy Netty (org.mock-server:mockserver-netty:5.15.0 - https://www.mock-server.com)
* jwarc (org.netpreserve:jwarc:0.31.1 - https://github.com/iipc/jwarc)
* Objenesis (org.objenesis:objenesis:3.2 - http://objenesis.org/objenesis)
* OpenSAML :: Core (org.opensaml:opensaml-core:4.3.2 - http://shibboleth.net/opensaml-core/)
* OpenSAML :: Messaging API (org.opensaml:opensaml-messaging-api:4.3.2 - http://shibboleth.net/opensaml-messaging-api/)
* OpenSAML :: Profile API (org.opensaml:opensaml-profile-api:4.3.2 - http://shibboleth.net/opensaml-profile-api/)
* OpenSAML :: SAML Provider API (org.opensaml:opensaml-saml-api:4.3.2 - http://shibboleth.net/opensaml-saml-api/)
* OpenSAML :: SAML Provider Implementations (org.opensaml:opensaml-saml-impl:4.3.2 - http://shibboleth.net/opensaml-saml-impl/)
* OpenSAML :: Security API (org.opensaml:opensaml-security-api:4.3.2 - http://shibboleth.net/opensaml-security-api/)
* OpenSAML :: Security Implementation (org.opensaml:opensaml-security-impl:4.3.2 - http://shibboleth.net/opensaml-security-impl/)
* OpenSAML :: SOAP Provider API (org.opensaml:opensaml-soap-api:4.3.2 - http://shibboleth.net/opensaml-soap-api/)
* OpenSAML :: SOAP Provider Implementations (org.opensaml:opensaml-soap-impl:4.3.2 - http://shibboleth.net/opensaml-soap-impl/)
* OpenSAML :: Storage API (org.opensaml:opensaml-storage-api:4.3.2 - http://shibboleth.net/opensaml-storage-api/)
* OpenSAML :: XML Security API (org.opensaml:opensaml-xmlsec-api:4.3.2 - http://shibboleth.net/opensaml-xmlsec-api/)
* OpenSAML :: XML Security Implementation (org.opensaml:opensaml-xmlsec-impl:4.3.2 - http://shibboleth.net/opensaml-xmlsec-impl/)
* org.roaringbitmap:RoaringBitmap (org.roaringbitmap:RoaringBitmap:1.0.0 - https://github.com/RoaringBitmap/RoaringBitmap)
* RRD4J (org.rrd4j:rrd4j:3.5 - https://github.com/rrd4j/rrd4j/)
* Scala Library (org.scala-lang:scala-library:2.13.2 - https://www.scala-lang.org/)
* Scala Compiler (org.scala-lang:scala-reflect:2.13.0 - https://www.scala-lang.org/)
* scala-collection-compat (org.scala-lang.modules:scala-collection-compat_2.13:2.1.6 - http://www.scala-lang.org/)
* scala-java8-compat (org.scala-lang.modules:scala-java8-compat_2.13:0.9.0 - http://www.scala-lang.org/)
* scala-parser-combinators (org.scala-lang.modules:scala-parser-combinators_2.13:1.1.2 - http://www.scala-lang.org/)
* scala-xml (org.scala-lang.modules:scala-xml_2.13:1.3.0 - http://www.scala-lang.org/)
* JSONassert (org.skyscreamer:jsonassert:1.5.3 - https://github.com/skyscreamer/JSONassert)
* JCL 1.2 implemented over SLF4J (org.slf4j:jcl-over-slf4j:2.0.17 - http://www.slf4j.org)
* Spring AOP (org.springframework:spring-aop:6.2.7 - https://github.com/spring-projects/spring-framework)
* Spring Beans (org.springframework:spring-beans:6.2.7 - https://github.com/spring-projects/spring-framework)
* Spring Context (org.springframework:spring-context:6.2.7 - https://github.com/spring-projects/spring-framework)
* Spring Context Support (org.springframework:spring-context-support:6.2.7 - https://github.com/spring-projects/spring-framework)
* Spring Core (org.springframework:spring-core:6.2.7 - https://github.com/spring-projects/spring-framework)
* Spring Expression Language (SpEL) (org.springframework:spring-expression:6.2.7 - https://github.com/spring-projects/spring-framework)
* Spring Commons Logging Bridge (org.springframework:spring-jcl:6.2.7 - https://github.com/spring-projects/spring-framework)
* Spring JDBC (org.springframework:spring-jdbc:6.2.7 - https://github.com/spring-projects/spring-framework)
* Spring Object/Relational Mapping (org.springframework:spring-orm:6.2.7 - https://github.com/spring-projects/spring-framework)
* Spring TestContext Framework (org.springframework:spring-test:6.2.7 - https://github.com/spring-projects/spring-framework)
* Spring Transaction (org.springframework:spring-tx:6.2.7 - https://github.com/spring-projects/spring-framework)
* Spring Web (org.springframework:spring-web:6.2.7 - https://github.com/spring-projects/spring-framework)
* Spring Web MVC (org.springframework:spring-webmvc:6.2.7 - https://github.com/spring-projects/spring-framework)
* spring-boot (org.springframework.boot:spring-boot:3.4.5 - https://spring.io/projects/spring-boot)
* spring-boot-actuator (org.springframework.boot:spring-boot-actuator:3.4.5 - https://spring.io/projects/spring-boot)
* spring-boot-actuator-autoconfigure (org.springframework.boot:spring-boot-actuator-autoconfigure:3.4.5 - https://spring.io/projects/spring-boot)
* spring-boot-autoconfigure (org.springframework.boot:spring-boot-autoconfigure:3.4.5 - https://spring.io/projects/spring-boot)
* spring-boot-starter (org.springframework.boot:spring-boot-starter:3.4.5 - https://spring.io/projects/spring-boot)
* spring-boot-starter-actuator (org.springframework.boot:spring-boot-starter-actuator:3.4.5 - https://spring.io/projects/spring-boot)
* spring-boot-starter-aop (org.springframework.boot:spring-boot-starter-aop:3.4.5 - https://spring.io/projects/spring-boot)
* spring-boot-starter-cache (org.springframework.boot:spring-boot-starter-cache:3.4.5 - https://spring.io/projects/spring-boot)
* spring-boot-starter-data-rest (org.springframework.boot:spring-boot-starter-data-rest:3.4.5 - https://spring.io/projects/spring-boot)
* spring-boot-starter-json (org.springframework.boot:spring-boot-starter-json:3.4.5 - https://spring.io/projects/spring-boot)
* spring-boot-starter-log4j2 (org.springframework.boot:spring-boot-starter-log4j2:3.4.5 - https://spring.io/projects/spring-boot)
* spring-boot-starter-security (org.springframework.boot:spring-boot-starter-security:3.4.5 - https://spring.io/projects/spring-boot)
* spring-boot-starter-test (org.springframework.boot:spring-boot-starter-test:3.4.5 - https://spring.io/projects/spring-boot)
* spring-boot-starter-thymeleaf (org.springframework.boot:spring-boot-starter-thymeleaf:3.4.5 - https://spring.io/projects/spring-boot)
* spring-boot-starter-tomcat (org.springframework.boot:spring-boot-starter-tomcat:3.4.5 - https://spring.io/projects/spring-boot)
* spring-boot-starter-web (org.springframework.boot:spring-boot-starter-web:3.4.5 - https://spring.io/projects/spring-boot)
* spring-boot-test (org.springframework.boot:spring-boot-test:3.4.5 - https://spring.io/projects/spring-boot)
* spring-boot-test-autoconfigure (org.springframework.boot:spring-boot-test-autoconfigure:3.4.5 - https://spring.io/projects/spring-boot)
* Spring Data Core (org.springframework.data:spring-data-commons:3.4.5 - https://spring.io/projects/spring-data)
* Spring Data REST - Core (org.springframework.data:spring-data-rest-core:4.4.5 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-core)
* Spring Data REST - WebMVC (org.springframework.data:spring-data-rest-webmvc:4.4.5 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-webmvc)
* Spring HATEOAS (org.springframework.hateoas:spring-hateoas:2.4.1 - https://github.com/spring-projects/spring-hateoas)
* Spring Plugin - Core (org.springframework.plugin:spring-plugin-core:3.0.0 - https://github.com/spring-projects/spring-plugin/spring-plugin-core)
* spring-security-config (org.springframework.security:spring-security-config:6.4.5 - https://spring.io/projects/spring-security)
* spring-security-core (org.springframework.security:spring-security-core:6.4.5 - https://spring.io/projects/spring-security)
* spring-security-crypto (org.springframework.security:spring-security-crypto:6.4.5 - https://spring.io/projects/spring-security)
* spring-security-saml2-service-provider (org.springframework.security:spring-security-saml2-service-provider:6.4.5 - https://spring.io/projects/spring-security)
* spring-security-test (org.springframework.security:spring-security-test:6.4.5 - https://spring.io/projects/spring-security)
* spring-security-web (org.springframework.security:spring-security-web:6.4.5 - https://spring.io/projects/spring-security)
* thymeleaf (org.thymeleaf:thymeleaf:3.1.3.RELEASE - http://www.thymeleaf.org/thymeleaf-lib/thymeleaf)
* thymeleaf-spring6 (org.thymeleaf:thymeleaf-spring6:3.1.3.RELEASE - http://www.thymeleaf.org/thymeleaf-lib/thymeleaf-spring6)
* unbescape (org.unbescape:unbescape:1.1.6.RELEASE - http://www.unbescape.org)
* snappy-java (org.xerial.snappy:snappy-java:1.1.10.1 - https://github.com/xerial/snappy-java)
* xml-matchers (org.xmlmatchers:xml-matchers:0.10 - http://code.google.com/p/xml-matchers/)
* org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.10.0 - https://www.xmlunit.org/)
* org.xmlunit:xmlunit-placeholders (org.xmlunit:xmlunit-placeholders:2.9.1 - https://www.xmlunit.org/xmlunit-placeholders/)
* SnakeYAML (org.yaml:snakeyaml:2.3 - https://bitbucket.org/snakeyaml/snakeyaml)
* Xerces2-j (xerces:xercesImpl:2.12.2 - https://xerces.apache.org/xerces2-j/)
BSD License:
* Adobe XMPCore (com.adobe.xmp:xmpcore:6.1.11 - https://www.adobe.com/devnet/xmp/library/eula-xmp-library-java.html)
* coverity-escapers (com.coverity.security:coverity-escapers:1.1.1 - http://coverity.com/security)
* Java Advanced Imaging Image I/O Tools API core (standalone) (com.github.jai-imageio:jai-imageio-core:1.4.0 - https://github.com/jai-imageio/jai-imageio-core)
* JSONLD Java :: Core (com.github.jsonld-java:jsonld-java:0.13.4 - http://github.com/jsonld-java/jsonld-java/jsonld-java/)
* curvesapi (com.github.virtuald:curvesapi:1.08 - https://github.com/virtuald/curvesapi)
* Protocol Buffers [Core] (com.google.protobuf:protobuf-java:3.15.0 - https://developers.google.com/protocol-buffers/protobuf-java/)
* Protocol Buffers [Core] (com.google.protobuf:protobuf-java:3.24.3 - https://developers.google.com/protocol-buffers/protobuf-java/)
* JZlib (com.jcraft:jzlib:1.1.3 - http://www.jcraft.com/jzlib/)
* jmustache (com.samskivert:jmustache:1.15 - http://github.com/samskivert/jmustache)
* dnsjava (dnsjava:dnsjava:3.6.3 - https://github.com/dnsjava/dnsjava)
* jaxen (jaxen:jaxen:2.0.0 - http://www.cafeconleche.org/jaxen/jaxen)
* ANTLR 4 Runtime (org.antlr:antlr4-runtime:4.13.2 - https://www.antlr.org/antlr4-runtime/)
* commons-compiler (org.codehaus.janino:commons-compiler:3.1.8 - http://janino-compiler.github.io/commons-compiler/)
* janino (org.codehaus.janino:janino:3.1.8 - http://janino-compiler.github.io/janino/)
* Stax2 API (org.codehaus.woodstox:stax2-api:4.2.1 - http://github.com/FasterXML/stax2-api)
* Hamcrest Date (org.exparity:hamcrest-date:2.0.8 - https://github.com/exparity/hamcrest-date)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* jersey-media-multipart (org.glassfish.jersey.media:jersey-media-multipart:3.1.3 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-media-multipart)
* Hamcrest (org.hamcrest:hamcrest:2.2 - http://hamcrest.org/JavaHamcrest/)
* Hamcrest Core (org.hamcrest:hamcrest-core:2.2 - http://hamcrest.org/JavaHamcrest/)
* HdrHistogram (org.hdrhistogram:HdrHistogram:2.2.2 - http://hdrhistogram.github.io/HdrHistogram/)
* JBibTeX (org.jbibtex:jbibtex:1.0.20 - http://www.jbibtex.org)
* asm (org.ow2.asm:asm:8.0.1 - http://asm.ow2.io/)
* asm-analysis (org.ow2.asm:asm-analysis:8.0.1 - http://asm.ow2.io/)
* asm-commons (org.ow2.asm:asm-commons:8.0.1 - http://asm.ow2.io/)
* asm-tree (org.ow2.asm:asm-tree:8.0.1 - http://asm.ow2.io/)
* PostgreSQL JDBC Driver (org.postgresql:postgresql:42.7.5 - https://jdbc.postgresql.org)
* Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections)
* JMatIO (org.tallison:jmatio:1.5 - https://github.com/tballison/jmatio)
* XZ for Java (org.tukaani:xz:1.10 - https://tukaani.org/xz/java.html)
* XMLUnit for Java (xmlunit:xmlunit:1.3 - http://xmlunit.sourceforge.net/)
CC0:
* reactive-streams (org.reactivestreams:reactive-streams:1.0.2 - http://www.reactive-streams.org/)
Common Development and Distribution License (CDDL):
* JavaMail API (no providers) (com.sun.mail:mailapi:1.6.2 - http://javaee.github.io/javamail/mailapi)
* Old JAXB Core (com.sun.xml.bind:jaxb-core:2.3.0.1 - http://jaxb.java.net/jaxb-bundles/jaxb-core)
* Old JAXB Runtime (com.sun.xml.bind:jaxb-impl:2.3.1 - http://jaxb.java.net/jaxb-bundles/jaxb-impl)
* Jakarta Annotations API (jakarta.annotation:jakarta.annotation-api:2.1.1 - https://projects.eclipse.org/projects/ee4j.ca)
* Jakarta Mail API (jakarta.mail:jakarta.mail-api:2.1.3 - https://projects.eclipse.org/projects/ee4j/jakarta.mail-api)
* Jakarta Servlet (jakarta.servlet:jakarta.servlet-api:6.1.0 - https://projects.eclipse.org/projects/ee4j.servlet)
* jakarta.transaction API (jakarta.transaction:jakarta.transaction-api:2.0.1 - https://projects.eclipse.org/projects/ee4j.jta)
* JavaBeans Activation Framework API jar (javax.activation:javax.activation-api:1.2.0 - http://java.net/all/javax.activation-api/)
* javax.annotation API (javax.annotation:javax.annotation-api:1.3 - http://jcp.org/en/jsr/detail?id=250)
* Java Servlet API (javax.servlet:javax.servlet-api:3.1.0 - http://servlet-spec.java.net)
* javax.transaction API (javax.transaction:javax.transaction-api:1.3 - http://jta-spec.java.net)
* jaxb-api (javax.xml.bind:jaxb-api:2.3.1 - https://github.com/javaee/jaxb-spec/jaxb-api)
* JHighlight (org.codelibs:jhighlight:1.1.0 - https://github.com/codelibs/jhighlight)
* Angus Mail default provider (org.eclipse.angus:jakarta.mail:2.0.3 - http://eclipse-ee4j.github.io/angus-mail/jakarta.mail)
* HK2 API module (org.glassfish.hk2:hk2-api:3.0.6 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api)
* ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:3.0.6 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator)
* HK2 Implementation Utilities (org.glassfish.hk2:hk2-utils:3.0.6 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-utils)
* OSGi resource locator (org.glassfish.hk2:osgi-resource-locator:1.0.3 - https://projects.eclipse.org/projects/ee4j/osgi-resource-locator)
* aopalliance version 1.0 repackaged as a module (org.glassfish.hk2.external:aopalliance-repackaged:3.0.6 - https://github.com/eclipse-ee4j/glassfish-hk2/external/aopalliance-repackaged)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* jersey-media-multipart (org.glassfish.jersey.media:jersey-media-multipart:3.1.3 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-media-multipart)
Cordra (Version 2) License Agreement:
* net.cnri:cnri-servlet-container-lib (net.cnri:cnri-servlet-container-lib:3.1.0 - https://gitlab.com/cnri/cnri-servlet-container)
* net.cnri:cnriutil (net.cnri:cnriutil:2.0 - https://gitlab.com/cnri/cnriutil)
Cordra (Version 2.5.0) License Agreement:
* net.cnri:cnri-servlet-container (net.cnri:cnri-servlet-container:3.1.0 - https://gitlab.com/cnri/cnri-servlet-container)
Eclipse Distribution License, Version 1.0:
* istack common utility code runtime (com.sun.istack:istack-commons-runtime:4.1.2 - https://projects.eclipse.org/projects/ee4j/istack-commons/istack-commons-runtime)
* Jakarta Activation API (jakarta.activation:jakarta.activation-api:2.1.3 - https://github.com/jakartaee/jaf-api)
* Jakarta Mail API (jakarta.mail:jakarta.mail-api:2.1.3 - https://projects.eclipse.org/projects/ee4j/jakarta.mail-api)
* Jakarta Persistence API (jakarta.persistence:jakarta.persistence-api:3.1.0 - https://github.com/eclipse-ee4j/jpa-api)
* Jakarta XML Binding API (jakarta.xml.bind:jakarta.xml.bind-api:4.0.2 - https://github.com/jakartaee/jaxb-api/jakarta.xml.bind-api)
* Angus Activation Registries (org.eclipse.angus:angus-activation:2.0.2 - https://github.com/eclipse-ee4j/angus-activation/angus-activation)
* Angus Mail default provider (org.eclipse.angus:jakarta.mail:2.0.3 - http://eclipse-ee4j.github.io/angus-mail/jakarta.mail)
* JAXB Core (org.glassfish.jaxb:jaxb-core:4.0.5 - https://eclipse-ee4j.github.io/jaxb-ri/)
* JAXB Runtime (org.glassfish.jaxb:jaxb-runtime:4.0.5 - https://eclipse-ee4j.github.io/jaxb-ri/)
* TXW2 Runtime (org.glassfish.jaxb:txw2:4.0.5 - https://eclipse-ee4j.github.io/jaxb-ri/)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* jersey-media-multipart (org.glassfish.jersey.media:jersey-media-multipart:3.1.3 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-media-multipart)
* MIME streaming extension (org.jvnet.mimepull:mimepull:1.9.15 - https://github.com/eclipse-ee4j/metro-mimepull)
* org.locationtech.jts:jts-core (org.locationtech.jts:jts-core:1.19.0 - https://www.locationtech.org/projects/technology.jts/jts-modules/jts-core)
* org.locationtech.jts.io:jts-io-common (org.locationtech.jts.io:jts-io-common:1.19.0 - https://www.locationtech.org/projects/technology.jts/jts-modules/jts-io/jts-io-common)
Eclipse Public License:
* System Rules (com.github.stefanbirkner:system-rules:1.19.0 - http://stefanbirkner.github.io/system-rules/)
* H2 Database Engine (com.h2database:h2:2.3.232 - https://h2database.com)
* Jakarta Annotations API (jakarta.annotation:jakarta.annotation-api:2.1.1 - https://projects.eclipse.org/projects/ee4j.ca)
* Jakarta Mail API (jakarta.mail:jakarta.mail-api:2.1.3 - https://projects.eclipse.org/projects/ee4j/jakarta.mail-api)
* Jakarta Persistence API (jakarta.persistence:jakarta.persistence-api:3.1.0 - https://github.com/eclipse-ee4j/jpa-api)
* Jakarta Servlet (jakarta.servlet:jakarta.servlet-api:6.1.0 - https://projects.eclipse.org/projects/ee4j.servlet)
* jakarta.transaction API (jakarta.transaction:jakarta.transaction-api:2.0.1 - https://projects.eclipse.org/projects/ee4j.jta)
* Jakarta RESTful WS API (jakarta.ws.rs:jakarta.ws.rs-api:3.1.0 - https://github.com/eclipse-ee4j/jaxrs-api)
* JUnit (junit:junit:4.13.2 - http://junit.org)
* AspectJ Weaver (org.aspectj:aspectjweaver:1.9.24 - https://www.eclipse.org/aspectj/)
* Angus Mail default provider (org.eclipse.angus:jakarta.mail:2.0.3 - http://eclipse-ee4j.github.io/angus-mail/jakarta.mail)
* Jetty :: Apache JSP Implementation (org.eclipse.jetty:apache-jsp:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/)
* Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client)
* Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client)
* Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.15.v20190215 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server)
* Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server)
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.15.v20190215 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
* Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-client)
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-continuation)
* Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.57.v20241219 - https://jetty.org/jetty-deploy/)
* Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.57.v20241219 - https://jetty.org/jetty-http/)
* Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.57.v20241219 - https://jetty.org/jetty-io/)
* Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-jmx)
* Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-rewrite)
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-security)
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.57.v20241219 - https://jetty.org/jetty-security/)
* Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.57.v20241219 - https://jetty.org/jetty-server/)
* Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.57.v20241219 - https://jetty.org/jetty-servlet/)
* Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-servlets)
* Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.57.v20241219 - https://jetty.org/jetty-util/)
* Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.57.v20241219 - https://jetty.org/jetty-util-ajax/)
* Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.57.v20241219 - https://jetty.org/jetty-webapp/)
* Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.53.v20231009 - https://eclipse.org/jetty/jetty-xml)
* Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.57.v20241219 - https://jetty.org/jetty-xml/)
* Jetty :: ALPN :: API (org.eclipse.jetty.alpn:alpn-api:1.1.3.v20160715 - http://www.eclipse.org/jetty/alpn-api)
* Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.53.v20231009 - https://eclipse.org/jetty/http2-parent/http2-client)
* Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.57.v20241219 - https://jetty.org/http2-parent/http2-common/)
* Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.53.v20231009 - https://eclipse.org/jetty/http2-parent/http2-hpack)
* Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.53.v20231009 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport)
* Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.15.v20190215 - https://eclipse.org/jetty/http2-parent/http2-server)
* Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.53.v20231009 - https://eclipse.org/jetty/http2-parent/http2-server)
* Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas)
* JSON-P Default Provider (org.glassfish:jakarta.json:2.0.1 - https://github.com/eclipse-ee4j/jsonp)
* HK2 API module (org.glassfish.hk2:hk2-api:3.0.6 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api)
* ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:3.0.6 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator)
* HK2 Implementation Utilities (org.glassfish.hk2:hk2-utils:3.0.6 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-utils)
* OSGi resource locator (org.glassfish.hk2:osgi-resource-locator:1.0.3 - https://projects.eclipse.org/projects/ee4j/osgi-resource-locator)
* aopalliance version 1.0 repackaged as a module (org.glassfish.hk2.external:aopalliance-repackaged:3.0.6 - https://github.com/eclipse-ee4j/glassfish-hk2/external/aopalliance-repackaged)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-core-common (org.glassfish.jersey.core:jersey-common:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* jersey-media-multipart (org.glassfish.jersey.media:jersey-media-multipart:3.1.3 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-media-multipart)
* org.locationtech.jts:jts-core (org.locationtech.jts:jts-core:1.19.0 - https://www.locationtech.org/projects/technology.jts/jts-modules/jts-core)
* org.locationtech.jts.io:jts-io-common (org.locationtech.jts.io:jts-io-common:1.19.0 - https://www.locationtech.org/projects/technology.jts/jts-modules/jts-io/jts-io-common)
GENERAL PUBLIC LICENSE, version 3 (GPL-3.0):
* juniversalchardet (com.github.albfernandez:juniversalchardet:2.5.0 - https://github.com/albfernandez/juniversalchardet)
GNU Lesser General Public License (LGPL):
* juniversalchardet (com.github.albfernandez:juniversalchardet:2.5.0 - https://github.com/albfernandez/juniversalchardet)
* btf (com.github.java-json-tools:btf:1.3 - https://github.com/java-json-tools/btf)
* jackson-coreutils (com.github.java-json-tools:jackson-coreutils:2.0 - https://github.com/java-json-tools/jackson-coreutils)
* jackson-coreutils-equivalence (com.github.java-json-tools:jackson-coreutils-equivalence:1.0 - https://github.com/java-json-tools/jackson-coreutils)
* json-patch (com.github.java-json-tools:json-patch:1.13 - https://github.com/java-json-tools/json-patch)
* json-schema-core (com.github.java-json-tools:json-schema-core:1.2.14 - https://github.com/java-json-tools/json-schema-core)
* json-schema-validator (com.github.java-json-tools:json-schema-validator:2.2.14 - https://github.com/java-json-tools/json-schema-validator)
* msg-simple (com.github.java-json-tools:msg-simple:1.2 - https://github.com/java-json-tools/msg-simple)
* uri-template (com.github.java-json-tools:uri-template:0.10 - https://github.com/java-json-tools/uri-template)
* openpdf (com.github.librepdf:openpdf:2.0.3 - https://github.com/LibrePDF/OpenPDF/openpdf)
* FindBugs-Annotations (com.google.code.findbugs:annotations:3.0.1u2 - http://findbugs.sourceforge.net/)
* JHighlight (org.codelibs:jhighlight:1.1.0 - https://github.com/codelibs/jhighlight)
* Cryptacular Library (org.cryptacular:cryptacular:1.2.5 - http://www.cryptacular.org)
* Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:6.0.6.Final - http://hibernate.org)
* Hibernate ORM - hibernate-core (org.hibernate.orm:hibernate-core:6.4.8.Final - https://hibernate.org/orm)
* Hibernate ORM - hibernate-jcache (org.hibernate.orm:hibernate-jcache:6.4.8.Final - https://hibernate.org/orm)
* Hibernate ORM - hibernate-jpamodelgen (org.hibernate.orm:hibernate-jpamodelgen:6.4.8.Final - https://hibernate.org/orm)
* im4java (org.im4java:im4java:1.4.0 - http://sourceforge.net/projects/im4java/)
* Javassist (org.javassist:javassist:3.30.2-GA - https://www.javassist.org/)
* Flying Saucer Core Renderer (org.xhtmlrenderer:flying-saucer-core:9.12.0 - http://code.google.com/p/flying-saucer/flying-saucer-core/)
* Flying Saucer PDF Rendering (org.xhtmlrenderer:flying-saucer-pdf:9.12.0 - http://code.google.com/p/flying-saucer/flying-saucer-pdf/)
* XOM (xom:xom:1.3.9 - https://xom.nu)
Go License:
* RE2/J (com.google.re2j:re2j:1.2 - http://github.com/google/re2j)
Handle.Net Public License Agreement (Ver.2):
* Handle Server (net.handle:handle:9.3.1 - https://www.handle.net)
ISC License:
* Simple Magic (com.j256.simplemagic:simplemagic:1.17 - https://256stuff.com/sources/simplemagic/)
MIT License:
* dexx (com.github.andrewoma.dexx:collection:0.7 - https://github.com/andrewoma/dexx)
* better-files (com.github.pathikrit:better-files_2.13:3.9.1 - https://github.com/pathikrit/better-files)
* Java SemVer (com.github.zafarkhaja:java-semver:0.9.0 - https://github.com/zafarkhaja/jsemver)
* dd-plist (com.googlecode.plist:dd-plist:1.28 - http://www.github.com/3breadt/dd-plist)
* DigitalCollections: IIIF API Library (de.digitalcollections.iiif:iiif-apis:0.3.11 - https://github.com/dbmdz/iiif-apis)
* s3mock (io.findify:s3mock_2.13:0.2.6 - https://github.com/findify/s3mock)
* ClassGraph (io.github.classgraph:classgraph:4.8.165 - https://github.com/classgraph/classgraph)
* JOpt Simple (net.sf.jopt-simple:jopt-simple:5.0.4 - http://jopt-simple.github.io/jopt-simple)
* Bouncy Castle JavaMail S/MIME APIs (org.bouncycastle:bcmail-jdk18on:1.80 - https://www.bouncycastle.org/download/bouncy-castle-java/)
* Bouncy Castle PKIX, CMS, EAC, TSP, PKCS, OCSP, CMP, and CRMF APIs (org.bouncycastle:bcpkix-jdk18on:1.80 - https://www.bouncycastle.org/download/bouncy-castle-java/)
* Bouncy Castle Provider (org.bouncycastle:bcprov-jdk18on:1.80 - https://www.bouncycastle.org/download/bouncy-castle-java/)
* Bouncy Castle ASN.1 Extension and Utility APIs (org.bouncycastle:bcutil-jdk18on:1.80 - https://www.bouncycastle.org/download/bouncy-castle-java/)
* org.brotli:dec (org.brotli:dec:0.1.2 - http://brotli.org/dec)
* Checker Qual (org.checkerframework:checker-qual:3.49.3 - https://checkerframework.org/)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* jersey-media-multipart (org.glassfish.jersey.media:jersey-media-multipart:3.1.3 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-media-multipart)
* mockito-core (org.mockito:mockito-core:3.12.4 - https://github.com/mockito/mockito)
* mockito-inline (org.mockito:mockito-inline:3.12.4 - https://github.com/mockito/mockito)
* SLF4J API Module (org.slf4j:slf4j-api:2.0.17 - http://www.slf4j.org)
* HAL Browser (org.webjars:hal-browser:ad9b865 - http://webjars.org)
* toastr (org.webjars.bowergithub.codeseven:toastr:2.1.4 - http://webjars.org)
* backbone (org.webjars.bowergithub.jashkenas:backbone:1.4.1 - https://www.webjars.org)
* underscore (org.webjars.bowergithub.jashkenas:underscore:1.13.2 - https://www.webjars.org)
* jquery (org.webjars.bowergithub.jquery:jquery-dist:3.7.1 - https://www.webjars.org)
* urijs (org.webjars.bowergithub.medialize:uri.js:1.19.11 - https://www.webjars.org)
* bootstrap (org.webjars.bowergithub.twbs:bootstrap:4.6.2 - https://www.webjars.org)
* core-js (org.webjars.npm:core-js:3.42.0 - https://www.webjars.org)
* @json-editor/json-editor (org.webjars.npm:json-editor__json-editor:2.15.2 - https://www.webjars.org)
Mozilla Public License:
* juniversalchardet (com.github.albfernandez:juniversalchardet:2.5.0 - https://github.com/albfernandez/juniversalchardet)
* openpdf (com.github.librepdf:openpdf:2.0.3 - https://github.com/LibrePDF/OpenPDF/openpdf)
* H2 Database Engine (com.h2database:h2:2.3.232 - https://h2database.com)
* Saxon-HE (net.sf.saxon:Saxon-HE:9.9.1-8 - http://www.saxonica.com/)
* Javassist (org.javassist:javassist:3.30.2-GA - https://www.javassist.org/)
* Mozilla Rhino (org.mozilla:rhino:1.7.7.2 - https://developer.mozilla.org/en/Rhino)
Public Domain:
* AOP alliance (aopalliance:aopalliance:1.0 - http://aopalliance.sourceforge.net)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-core-common (org.glassfish.jersey.core:jersey-common:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* jersey-media-multipart (org.glassfish.jersey.media:jersey-media-multipart:3.1.3 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-media-multipart)
* HdrHistogram (org.hdrhistogram:HdrHistogram:2.2.2 - http://hdrhistogram.github.io/HdrHistogram/)
* JSON in Java (org.json:json:20231013 - https://github.com/douglascrockford/JSON-java)
* LatencyUtils (org.latencyutils:LatencyUtils:2.0.3 - http://latencyutils.github.io/LatencyUtils/)
* Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections)
UnRar License:
* Java Unrar (com.github.junrar:junrar:7.5.5 - https://github.com/junrar/junrar)
Unicode/ICU License:
* ICU4J (com.ibm.icu:icu4j:62.2 - http://icu-project.org/)
W3C license:
* jersey-core-client (org.glassfish.jersey.core:jersey-client:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* jersey-media-multipart (org.glassfish.jersey.media:jersey-media-multipart:3.1.3 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-media-multipart)
jQuery license:
* jersey-core-client (org.glassfish.jersey.core:jersey-client:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:3.1.10 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* jersey-media-multipart (org.glassfish.jersey.media:jersey-media-multipart:3.1.3 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-media-multipart)

NOTICE

@@ -1,28 +1,15 @@
Removed:

Licenses of Third-Party Libraries
=================================

DSpace uses third-party libraries which may be distributed under
different licenses than specified in our LICENSE file. Information
about these licenses is detailed in the LICENSES_THIRD_PARTY file at
the root of the source tree. You must agree to the terms of these
licenses, in addition to the DSpace source code license, in order to
use this software.

Licensing Notices
=================

[July 2019] DuraSpace joined with LYRASIS (another 501(c)3 organization) in July 2019.
LYRASIS holds the copyrights of DuraSpace.

[July 2009] Fedora Commons joined with the DSpace Foundation and began operating under
the new name DuraSpace in July 2009. DuraSpace holds the copyrights of
the DSpace Foundation, Inc.

[July 2007] The DSpace Foundation, Inc. is a 501(c)3 corporation established in July 2007
with a mission to promote and advance the DSpace platform enabling management,
access and preservation of digital works. The Foundation was able to transfer
the legal copyright from Hewlett-Packard Company (HP) and Massachusetts
Institute of Technology (MIT) to the DSpace Foundation in October 2007. Many
of the files in the source code may contain a copyright statement stating HP
and MIT possess the copyright; in these instances please note that the
copyright has transferred to the DSpace Foundation, and subsequently to DuraSpace.

Added:

Licensing Notice
================

Fedora Commons joined with the DSpace Foundation and began operating under
the new name DuraSpace in July 2009. DuraSpace holds the copyrights of
the DSpace Foundation, Inc.

The DSpace Foundation, Inc. is a 501(c)3 corporation established in July 2007
with a mission to promote and advance the DSpace platform enabling management,
access and preservation of digital works. The Foundation was able to transfer
the legal copyright from Hewlett-Packard Company (HP) and Massachusetts
Institute of Technology (MIT) to the DSpace Foundation in October 2007. Many
of the files in the source code may contain a copyright statement stating HP
and MIT possess the copyright; in these instances please note that the
copyright has transferred to the DSpace Foundation, and subsequently to DuraSpace.

README

@@ -0,0 +1,51 @@
Installation instructions are included in this release package under
- dspace/docs/html/index.html
or
- dspace/docs/pdf/DSpace-Manual.pdf
DSpace version information can be found in this release package under
- dspace/CHANGES
or
- dspace/docs/html/History.html
Documentation for the most recent stable release(s) may be downloaded
or viewed online at
- http://www.dspace.org/latest-release/
- http://wiki.duraspace.org/display/DSDOC/
Installation instructions for other versions may be different, so you
are encouraged to obtain the appropriate version of the Documentation
(from the links above or from SVN).
To obtain files from the SVN repository and build, please see:
- https://scm.dspace.org/svn/repo/dspace/tags/
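For example, a checkout-and-build sequence for this release might look like
the following (a sketch only; the build-directory path and the Ant
"fresh_install" step are assumptions based on the 1.x build process, so
consult the bundled manual above for the authoritative steps):
  # Fetch the 1.7.0 tag and build the installation package
  svn checkout http://scm.dspace.org/svn/repo/dspace/tags/dspace-1.7.0
  cd dspace-1.7.0/dspace
  mvn package
  # Then install from the assembled build directory (path assumed)
  cd target/dspace-1.7.0-build.dir
  ant fresh_install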
Please refer any further problems to the dspace-tech@lists.sourceforge.net
mailing list.
- http://sourceforge.net/mail/?group_id=19984
Detailed Issue Tracking for DSpace is done on our JIRA Issue Tracker
- http://jira.dspace.org/
To contribute to DSpace, please see:
- https://wiki.duraspace.org/display/DSPACE/HowToContribute
For more details about DSpace, including a list of service providers,
places to seek help, news articles and lists of other users, please see:
- http://www.dspace.org/
DSpace source code licensing information available online at:
- http://www.dspace.org/license/
Copyright (c) 2002-2010, DuraSpace. All rights reserved.

README.md

@@ -1,130 +0,0 @@
# DSpace
[![Build Status](https://github.com/DSpace/DSpace/workflows/Build/badge.svg)](https://github.com/DSpace/DSpace/actions?query=workflow%3ABuild)
[DSpace Documentation](https://wiki.lyrasis.org/display/DSDOC/) |
[DSpace Releases](https://github.com/DSpace/DSpace/releases) |
[DSpace Wiki](https://wiki.lyrasis.org/display/DSPACE/Home) |
[Support](https://wiki.lyrasis.org/display/DSPACE/Support)
## Overview
DSpace open source software is a turnkey repository application used by more than
2,000 organizations and institutions worldwide to provide durable access to digital resources.
For more information, visit http://www.dspace.org/
DSpace consists of both a Java-based backend and an Angular-based frontend.
* Backend (this codebase) provides a REST API, along with other machine-based interfaces (e.g., OAI-PMH, SWORD)
* The REST Contract is at https://github.com/DSpace/RestContract
* Frontend (https://github.com/DSpace/dspace-angular/) is the User Interface built on the REST API
Prior versions of DSpace (v6.x and below) used two different UIs (XMLUI and JSPUI). Those UIs are no longer supported in v7 and above.
* A maintenance branch for older versions is still available, see `dspace-6_x` for 6.x maintenance.
## Downloads
* Backend (REST API): https://github.com/DSpace/DSpace/releases
* Frontend (User Interface): https://github.com/DSpace/dspace-angular/releases
## Documentation / Installation
Documentation for each release may be viewed online or downloaded via our [Documentation Wiki](https://wiki.lyrasis.org/display/DSDOC/).
The latest DSpace Installation instructions are available at:
https://wiki.lyrasis.org/display/DSDOC9x/Installing+DSpace
Please be aware that, as a Java web application, DSpace requires a database (PostgreSQL)
and a servlet container (usually Tomcat) in order to function.
More information about these and all other prerequisites can be found in the Installation instructions above.
## Running DSpace 9 in Docker
NOTE: At this time, we do not have production-ready Docker images for DSpace.
That said, we do have quick-start Docker Compose scripts for development or testing purposes.
See [Running DSpace 9 with Docker Compose](dspace/src/main/docker-compose/README.md)
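As a rough illustration, starting the backend with those quick-start scripts can be as
simple as the following (a sketch only; the compose project name is an assumption, so
treat the linked README as authoritative):
```
# From the source checkout root, start the backend containers in the background
cd [dspace-src]
docker-compose -p d9 up -d
```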
## Contributing
See [Contributing documentation](CONTRIBUTING.md)
## Getting Help
DSpace provides public mailing lists where you can post questions or raise topics for discussion.
We welcome everyone to participate in these lists:
* [dspace-community@googlegroups.com](https://groups.google.com/d/forum/dspace-community) : General discussion about DSpace platform, announcements, sharing of best practices
* [dspace-tech@googlegroups.com](https://groups.google.com/d/forum/dspace-tech) : Technical support mailing list. See also our guide for [How to troubleshoot an error](https://wiki.lyrasis.org/display/DSPACE/Troubleshoot+an+error).
* [dspace-devel@googlegroups.com](https://groups.google.com/d/forum/dspace-devel) : Developers / Development mailing list
Great Q&A is also available under the [DSpace tag on Stackoverflow](http://stackoverflow.com/questions/tagged/dspace)
Additional support options are at https://wiki.lyrasis.org/display/DSPACE/Support
DSpace also has an active service provider network. If you'd rather hire a service provider to
install, upgrade, customize, or host DSpace, then we recommend getting in touch with one of our
[Registered Service Providers](http://www.dspace.org/service-providers).
## Issue Tracker
DSpace uses GitHub to track issues:
* Backend (REST API) issues: https://github.com/DSpace/DSpace/issues
* Frontend (User Interface) issues: https://github.com/DSpace/dspace-angular/issues
## Testing
### Running Tests
By default, in DSpace, Unit Tests and Integration Tests are disabled. However, they are
run automatically by [GitHub Actions](https://github.com/DSpace/DSpace/actions?query=workflow%3ABuild) for all Pull Requests and code commits.
* How to run both Unit Tests (via `maven-surefire-plugin`) and Integration Tests (via `maven-failsafe-plugin`):
```
mvn install -DskipUnitTests=false -DskipIntegrationTests=false
```
* How to run _only_ Unit Tests:
```
mvn test -DskipUnitTests=false
```
* How to run a *single* Unit Test (a worked example follows this list)
```
# Run all tests in a specific test class
# NOTE: failIfNoTests=false is required to skip tests in other modules
mvn test -DskipUnitTests=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false
# Run one test method in a specific test class
mvn test -DskipUnitTests=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
```
* How to run _only_ Integration Tests
```
mvn install -DskipIntegrationTests=false
```
* How to run a *single* Integration Test
```
# Run all integration tests in a specific test class
# NOTE: failIfNoTests=false is required to skip tests in other modules
mvn install -DskipIntegrationTests=false -Dit.test=[full.package.testClassName] -DfailIfNoTests=false
# Run one test method in a specific test class
mvn install -DskipIntegrationTests=false -Dit.test=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
```
* How to run only tests of a specific DSpace module
```
# Before you can run only one module's tests, other modules may need to be installed into your ~/.m2
cd [dspace-src]
mvn clean install
# Then, move into a module subdirectory, and run the test command
cd [dspace-src]/dspace-server-webapp
# Choose your test command from the lists above
```
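As a concrete illustration of the single-test forms above (the class and method names
here are hypothetical placeholders; substitute your own):
```
# Run one (hypothetical) unit test class, then a single method within it
mvn test -DskipUnitTests=false -Dtest=org.dspace.content.ItemTest -DfailIfNoTests=false
mvn test -DskipUnitTests=false -Dtest=org.dspace.content.ItemTest#testItemName -DfailIfNoTests=false
```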
## License
DSpace source code is freely available under a standard [BSD 3-Clause license](https://opensource.org/licenses/BSD-3-Clause).
The full license is available in the [LICENSE](LICENSE) file or online at http://www.dspace.org/license/
DSpace uses third-party libraries which may be distributed under different licenses. Those licenses are listed
in the [LICENSES_THIRD_PARTY](LICENSES_THIRD_PARTY) file.


@@ -1,15 +0,0 @@
# Security Policy
## Supported Versions
For information regarding which versions of DSpace are currently under support, please see our DSpace Software Support Policy:
https://wiki.lyrasis.org/display/DSPACE/DSpace+Software+Support+Policy
## Reporting a Vulnerability
If you believe you have found a security vulnerability in a supported version of DSpace, we encourage you to let us know right away.
We will investigate all legitimate reports and do our best to quickly fix the problem. Please see our DSpace Software Support Policy
for information on privately reporting vulnerabilities:
https://wiki.lyrasis.org/display/DSPACE/DSpace+Software+Support+Policy


@@ -1,11 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE suppressions PUBLIC
"-//Puppy Crawl//DTD Suppressions 1.2//EN"
"http://checkstyle.sourceforge.net/dtds/suppressions_1_2.dtd">
<suppressions>
<!-- Temporarily suppress indentation checks for all Tests -->
<!-- TODO: We should have these turned on. But, currently there's a known bug with indentation checks
on JMockIt Expectations blocks and similar. See https://github.com/checkstyle/checkstyle/issues/3739 -->
<suppress checks="Indentation" files="src[/\\]test[/\\]java"/>
<suppress checks="Regexp" files="DSpaceHttpClientFactory\.java"/>
</suppressions>
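Both this suppressions file and the full configuration below are consumed by Checkstyle's
Maven plugin, so conformance can be verified locally before a commit. A minimal invocation
might look like this (a sketch; the exact plugin wiring and any profile flags live in the
project's pom.xml):
```
# From the source checkout root, run the configured Checkstyle rules
cd [dspace-src]
mvn checkstyle:check
```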


@@ -1,160 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE module PUBLIC
"-//Puppy Crawl//DTD Check Configuration 1.3//EN"
"http://checkstyle.sourceforge.net/dtds/configuration_1_3.dtd">
<!--
DSpace CodeStyle Requirements
1. 4-space indents for Java, and 2-space indents for XML. NO TABS ALLOWED.
2. K&R style braces required. Braces required on all blocks.
3. Do not use wildcard imports (e.g. import java.util.*). Duplicated or unused imports also not allowed.
4. Javadocs should exist for all public classes and methods. (Methods rule is unenforced at this time.) Keep it short and to the point
5. Maximum line length is 120 characters (except for long URLs, packages or imports)
6. No trailing spaces allowed (except in comments)
7. Tokens should be surrounded by whitespace (see http://checkstyle.sourceforge.net/config_whitespace.html#WhitespaceAround)
8. Each source file must include our license header (validated separately by license-maven-plugin, see pom.xml)
For more information on CheckStyle configurations below, see: http://checkstyle.sourceforge.net/checks.html
-->
<module name="Checker">
<!-- Configure checker to use UTF-8 encoding -->
<property name="charset" value="UTF-8"/>
<!-- Configure checker to run on files with these extensions -->
<property name="fileExtensions" value="java, properties, cfg, xml"/>
<!-- Suppression configurations in checkstyle-suppressions.xml in same directory -->
<module name="SuppressionFilter">
<property name="file" value="${checkstyle.suppressions.file}" default="checkstyle-suppressions.xml"/>
</module>
<!-- No tab characters ('\t') allowed in the source code -->
<module name="FileTabCharacter">
<property name="eachLine" value="true"/>
<property name="fileExtensions" value="java, properties, cfg, css, js, xml"/>
</module>
<!-- No Trailing Whitespace, except on lines that only have an asterisk (e.g. Javadoc comments) -->
<module name="RegexpSingleline">
<property name="format" value="(?&lt;!\*)\s+$|\*\s\s+$"/>
<property name="message" value="Line has trailing whitespace"/>
<property name="fileExtensions" value="java, properties, cfg, css, js, xml"/>
</module>
<!-- Allow individual lines of code to be excluded from these rules, if they are annotated
with @SuppressWarnings. See also SuppressWarningsHolder below -->
<module name="SuppressWarningsFilter" />
<!-- Maximum line length is 120 characters -->
<module name="LineLength">
<property name="fileExtensions" value="java"/>
<property name="max" value="120"/>
<!-- Only exceptions for packages, imports, URLs, and JavaDoc {@link} tags -->
<property name="ignorePattern" value="^package.*|^import.*|http://|https://|@link"/>
</module>
<!-- Check individual Java source files for specific rules -->
<module name="TreeWalker">
<!-- Highlight any TODO or FIXME comments in info messages -->
<module name="TodoComment">
<property name="severity" value="info"/>
<property name="format" value="(TODO)|(FIXME)"/>
</module>
<!-- Do not report errors on any lines annotated with @SuppressWarnings -->
<module name="SuppressWarningsHolder"/>
<!-- ##### Import statement requirements ##### -->
<!-- Star imports (e.g. import java.util.*) are NOT ALLOWED -->
<module name="AvoidStarImport"/>
<!-- Redundant import statements are NOT ALLOWED -->
<module name="RedundantImport"/>
<!-- Unused import statements are NOT ALLOWED -->
<module name="UnusedImports"/>
<!-- Ensure imports appear alphabetically and grouped -->
<module name="CustomImportOrder">
<property name="sortImportsInGroupAlphabetically" value="true"/>
<property name="separateLineBetweenGroups" value="true"/>
<property name="customImportOrderRules" value="STATIC###STANDARD_JAVA_PACKAGE###THIRD_PARTY_PACKAGE"/>
</module>
<!-- ##### Javadocs requirements ##### -->
<!-- Requirements for Javadocs for classes/interfaces -->
<module name="JavadocType">
<!-- All public classes/interfaces MUST HAVE Javadocs -->
<property name="scope" value="public"/>
<!-- Add an exception for anonymous inner classes -->
<property name="excludeScope" value="anoninner"/>
<!-- Ignore errors related to unknown tags -->
<property name="allowUnknownTags" value="true"/>
<!-- Allow params tags to be optional -->
<property name="allowMissingParamTags" value="false"/>
</module>
<!-- Requirements for Javadocs for methods -->
<module name="JavadocMethod">
<!-- All public methods MUST HAVE Javadocs -->
<property name="accessModifiers" value="public"/>
<!-- Allow params, throws and return tags to be optional -->
<property name="allowMissingParamTags" value="true"/>
<property name="allowMissingReturnTag" value="true"/>
</module>
<!-- ##### Requirements for K&R Style braces ##### -->
<!-- Code blocks MUST HAVE braces, even single line statements (if, while, etc) -->
<module name="NeedBraces"/>
<!-- Left braces should be at the end of current line (default value)-->
<module name="LeftCurly"/>
<!-- Right braces should be on start of a new line (default value) -->
<module name="RightCurly"/>
<!-- Enforce Java-style array declaration instead of C-style -->
<module name="ArrayTypeStyle"/>
<!-- ##### Indentation / Whitespace requirements ##### -->
<!-- Require 4-space indentation (default value) -->
<module name="Indentation"/>
<!-- Whitespace should exist around all major tokens -->
<module name="WhitespaceAround">
<!-- However, make an exception for empty constructors, methods, types, etc. -->
<property name="allowEmptyConstructors" value="true"/>
<property name="allowEmptyMethods" value="true"/>
<property name="allowEmptyTypes" value="true"/>
<property name="allowEmptyLoops" value="true"/>
</module>
<!-- Validate whitespace around Generics (angle brackets) per typical conventions
http://checkstyle.sourceforge.net/config_whitespace.html#GenericWhitespace -->
<module name="GenericWhitespace"/>
<!-- ##### Requirements for "switch" statements ##### -->
<!-- "switch" statements MUST have a "default" clause -->
<module name="MissingSwitchDefault"/>
<!-- "case" clauses in switch statements MUST include break, return, throw or continue -->
<module name="FallThrough"/>
<!-- ##### Other / Miscellaneous requirements ##### -->
<!-- Require utility classes do not have a public constructor -->
<module name="HideUtilityClassConstructor"/>
<!-- Require each variable declaration is its own statement on its own line -->
<module name="MultipleVariableDeclarations"/>
<!-- Each line of code can only include one statement -->
<module name="OneStatementPerLine"/>
<!-- Require that "catch" statements are not empty (must at least contain a comment) -->
<module name="EmptyCatchBlock"/>
<!-- Require to use DSpaceHttpClientFactory.getClient() statement instead of creating directly the client -->
<module name="Regexp">
<property name="format" value="HttpClientBuilder\.create\s*\(\s*\)" />
<property name="message" value="Use DSpaceHttpClientFactory.getClient() instead of HttpClientBuilder.create()" />
<property name="illegalPattern" value="true"/>
<property name="ignoreComments" value="true"/>
</module>
<!-- Require to use DSpaceHttpClientFactory.getClient() statement instead of creating directly the client -->
<module name="Regexp">
<property name="format" value="HttpClients\.createDefault\s*\(\s*\)" />
<property name="message" value="Use DSpaceHttpClientFactory.getClient() instead of HttpClients.createDefault()" />
<property name="illegalPattern" value="true"/>
<property name="ignoreComments" value="true"/>
</module>
</module>
</module>
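As an illustration only (not part of the original configuration), a minimal class that satisfies the rules above (4-space indents, K&R braces on every block, no star imports, Javadoc on the public type, lines under 120 characters) might look like:

package org.dspace.example;

import java.util.List;

/**
 * Hypothetical example class that passes the DSpace Checkstyle rules above.
 */
public class StyleExample {
    /**
     * Count the non-null entries in a list.
     *
     * @param values the values to scan
     * @return the number of non-null entries
     */
    public int countNonNull(List<String> values) {
        int count = 0;
        for (String value : values) {
            // Braces are required even for single-statement blocks (NeedBraces)
            if (value != null) {
                count++;
            }
        }
        return count;
    }
}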

View File

@@ -1,37 +0,0 @@
networks:
# Default to using network named 'dspacenet' from docker-compose.yml.
# Its full name will be prepended with the project name (e.g. "-p d7" means it will be named "d7_dspacenet")
default:
name: ${COMPOSE_PROJECT_NAME}_dspacenet
external: true
services:
dspace-cli:
image: "${DOCKER_REGISTRY:-docker.io}/${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-latest}"
container_name: dspace-cli
build:
context: .
dockerfile: Dockerfile.cli
environment:
# The syntax below may look odd, but it is how dspace.cfg settings are overridden via environment variables.
# See https://github.com/DSpace/DSpace/blob/main/dspace/config/config-definition.xml
# __P__ => "." (e.g. dspace__P__dir => dspace.dir)
# __D__ => "-" (e.g. google__D__metadata => google-metadata)
# dspace.dir: Must match with Dockerfile's DSPACE_INSTALL directory.
dspace__P__dir: /dspace
# db.url: Ensure we are using the 'dspacedb' image for our database
db__P__url: 'jdbc:postgresql://dspacedb:5432/dspace'
# solr.server: Ensure we are using the 'dspacesolr' image for Solr
solr__P__server: http://dspacesolr:8983/solr
volumes:
# Keep DSpace assetstore directory between reboots
- assetstore:/dspace/assetstore
# Mount local [src]/dspace/config/ to container. This syncs your local configs with container
# NOTE: Environment variables specified above will OVERRIDE any configs in local.cfg or dspace.cfg
- ./dspace/config:/dspace/config
entrypoint: /dspace/bin/dspace
command: help
tty: true
stdin_open: true
volumes:
assetstore:
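As a sketch of the naming convention described in the comments above (the actual translation is performed by DSpace's configuration machinery; this helper and its class name are purely illustrative):

// Illustrative only: maps an environment variable name to its dspace.cfg key,
// e.g. "dspace__P__dir" -> "dspace.dir" and "google__D__metadata" -> "google-metadata".
public final class EnvNameMapper {
    static String toPropertyKey(String envName) {
        return envName.replace("__P__", ".").replace("__D__", "-");
    }
}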

View File

@@ -1,138 +0,0 @@
networks:
dspacenet:
ipam:
config:
# Define a custom subnet for our DSpace network, so that we can easily trust requests from host to container.
# If you customize this value, be sure to customize the 'proxies.trusted.ipranges' env variable below.
- subnet: 172.23.0.0/16
services:
# DSpace (backend) webapp container
dspace:
container_name: dspace
environment:
# The syntax below may look odd, but it is how dspace.cfg settings are overridden via environment variables.
# See https://github.com/DSpace/DSpace/blob/main/dspace/config/config-definition.xml
# __P__ => "." (e.g. dspace__P__dir => dspace.dir)
# __D__ => "-" (e.g. google__D__metadata => google-metadata)
# dspace.dir: Must match with Dockerfile's DSPACE_INSTALL directory.
dspace__P__dir: /dspace
# Uncomment to set a non-default value for dspace.server.url or dspace.ui.url
# dspace__P__server__P__url: http://localhost:8080/server
# dspace__P__ui__P__url: http://localhost:4000
dspace__P__name: 'DSpace Started with Docker Compose'
# db.url: Ensure we are using the 'dspacedb' image for our database
db__P__url: 'jdbc:postgresql://dspacedb:5432/dspace'
# solr.server: Ensure we are using the 'dspacesolr' image for Solr
solr__P__server: http://dspacesolr:8983/solr
# matomo.tracker.url: Ensure we are using the 'matomo' image for Matomo
matomo__P__tracker__P__url: http://matomo
# proxies.trusted.ipranges: This setting is required for a REST API running in Docker to trust requests
# from the host machine. This IP range MUST correspond to the 'dspacenet' subnet defined above.
proxies__P__trusted__P__ipranges: '172.23.0'
LOGGING_CONFIG: /dspace/config/log4j2-container.xml
image: "${DOCKER_REGISTRY:-docker.io}/${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-latest-test}"
build:
context: .
dockerfile: Dockerfile.test
depends_on:
- dspacedb
networks:
- dspacenet
ports:
- published: 8080
target: 8080
- published: 8000
target: 8000
stdin_open: true
tty: true
volumes:
# Keep DSpace assetstore directory between reboots
- assetstore:/dspace/assetstore
# Mount local [src]/dspace/config/ to container. This syncs your local configs with container
# NOTE: Environment variables specified above will OVERRIDE any configs in local.cfg or dspace.cfg
- ./dspace/config:/dspace/config
# Ensure that the database is ready BEFORE starting tomcat
# 1. While a TCP connection to dspacedb port 5432 is not available, continue to sleep
# 2. Then, run database migration to init database tables
# 3. Finally, start DSpace
entrypoint:
- /bin/bash
- '-c'
- |
while (!</dev/tcp/dspacedb/5432) > /dev/null 2>&1; do sleep 1; done;
/dspace/bin/dspace database migrate
java -jar /dspace/webapps/server-boot.jar --dspace.dir=/dspace
# DSpace PostgreSQL database container
dspacedb:
container_name: dspacedb
# Uses the base PostgreSQL image
image: "docker.io/postgres:${POSTGRES_VERSION:-15}"
environment:
PGDATA: /pgdata
POSTGRES_DB: dspace
POSTGRES_USER: dspace
POSTGRES_PASSWORD: dspace
networks:
dspacenet:
ports:
- published: 5432
target: 5432
stdin_open: true
tty: true
volumes:
# Keep Postgres data directory between reboots
- pgdata:/pgdata
# DSpace Solr container
dspacesolr:
container_name: dspacesolr
image: "${DOCKER_REGISTRY:-docker.io}/${DOCKER_OWNER:-dspace}/dspace-solr:${DSPACE_VER:-latest}"
build:
context: ./dspace/src/main/docker/dspace-solr/
# Provide path to Solr configs necessary to build Docker image
additional_contexts:
solrconfigs: ./dspace/solr/
args:
SOLR_VERSION: "${SOLR_VER:-9.8}"
networks:
dspacenet:
ports:
- published: 8983
target: 8983
stdin_open: true
tty: true
working_dir: /var/solr/data
volumes:
# Keep Solr data directory between reboots
- solr_data:/var/solr/data
# NOTE: We are not running Solr as "root", but we need root permissions to copy our cores to the mounted
# /var/solr/data directory. Then we start Solr as the "solr" user.
user: root
# Initialize all DSpace Solr cores then start Solr:
# * First, run precreate-core to create the core (if it doesn't yet exist). If it already exists, this is a no-op
# * Second, copy configsets to this core:
# Updates to Solr configs require the container to be rebuilt/restarted: `docker compose -p d7 up -d --build dspacesolr`
# * Third, ensure all new folders are owned by "solr" user
# * Finally, start Solr as the "solr" user via the provided solr-foreground script
entrypoint:
- /bin/bash
- '-c'
- |
init-var-solr
precreate-core authority /opt/solr/server/solr/configsets/authority
cp -r /opt/solr/server/solr/configsets/authority/* authority
precreate-core oai /opt/solr/server/solr/configsets/oai
cp -r /opt/solr/server/solr/configsets/oai/* oai
precreate-core search /opt/solr/server/solr/configsets/search
cp -r /opt/solr/server/solr/configsets/search/* search
precreate-core statistics /opt/solr/server/solr/configsets/statistics
cp -r /opt/solr/server/solr/configsets/statistics/* statistics
precreate-core qaevent /opt/solr/server/solr/configsets/qaevent
cp -r /opt/solr/server/solr/configsets/qaevent/* qaevent
precreate-core suggestion /opt/solr/server/solr/configsets/suggestion
cp -r /opt/solr/server/solr/configsets/suggestion/* suggestion
chown -R solr:solr /var/solr
runuser -u solr -- solr-foreground
volumes:
assetstore:
pgdata:
solr_data:
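The dspace entrypoint above waits for the database port before migrating and starting the webapp. For illustration only, an equivalent readiness check written in Java (the host, port and one-second retry mirror the bash loop; none of this is part of the compose file):

import java.io.IOException;
import java.net.Socket;

public class WaitForPort {
    // Block until a TCP connection to host:port succeeds, retrying once per second.
    public static void waitFor(String host, int port) throws InterruptedException {
        while (true) {
            try (Socket socket = new Socket(host, port)) {
                return; // port is accepting connections
            } catch (IOException e) {
                Thread.sleep(1000); // not reachable yet; retry
            }
        }
    }
}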

File diff suppressed because it is too large

View File

@@ -1,59 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status;
import java.sql.SQLException;
import java.time.LocalDate;
import org.dspace.content.AccessStatus;
import org.dspace.content.Bitstream;
import org.dspace.content.Item;
import org.dspace.core.Context;
/**
* Plugin interface for the access status calculation.
*/
public interface AccessStatusHelper {
/**
* Calculate the access status for the item.
*
* @param context the DSpace context
* @param item the item
* @param threshold the embargo threshold date
* @param type the type of calculation
* @return the access status
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
public AccessStatus getAccessStatusFromItem(Context context,
Item item, LocalDate threshold, String type) throws SQLException;
/**
* Calculate the anonymous access status for the item.
*
* @param context the DSpace context
* @param item the item to check for embargo information
* @param threshold the embargo threshold date
* @return the access status
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
public AccessStatus getAnonymousAccessStatusFromItem(Context context,
Item item, LocalDate threshold) throws SQLException;
/**
* Calculate the access status for the bitstream.
*
* @param context the DSpace context
* @param bitstream the bitstream
* @param threshold the embargo threshold date
* @param type the type of calculation
* @return the access status
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
public AccessStatus getAccessStatusFromBitstream(Context context,
Bitstream bitstream, LocalDate threshold, String type) throws SQLException;
}
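Because this is a plugin interface (wired up via the plugin.single configuration shown in the AccessStatusService Javadoc further below), a site can supply its own implementation. As a purely hypothetical sketch, reusing the imports above and the OPEN_ACCESS constant from DefaultAccessStatusHelper (shown later), a helper that reports everything as open access:

public class AlwaysOpenAccessStatusHelper implements AccessStatusHelper {
    @Override
    public AccessStatus getAccessStatusFromItem(Context context,
        Item item, LocalDate threshold, String type) throws SQLException {
        return new AccessStatus(DefaultAccessStatusHelper.OPEN_ACCESS, null);
    }
    @Override
    public AccessStatus getAnonymousAccessStatusFromItem(Context context,
        Item item, LocalDate threshold) throws SQLException {
        return new AccessStatus(DefaultAccessStatusHelper.OPEN_ACCESS, null);
    }
    @Override
    public AccessStatus getAccessStatusFromBitstream(Context context,
        Bitstream bitstream, LocalDate threshold, String type) throws SQLException {
        return new AccessStatus(DefaultAccessStatusHelper.OPEN_ACCESS, null);
    }
}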

View File

@@ -1,103 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status;
import java.sql.SQLException;
import java.time.LocalDate;
import java.time.ZoneId;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.access.status.service.AccessStatusService;
import org.dspace.content.AccessStatus;
import org.dspace.content.Bitstream;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.core.service.PluginService;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implementation for the access status calculation service.
*/
public class AccessStatusServiceImpl implements AccessStatusService {
private static final Logger log = LogManager.getLogger(AccessStatusServiceImpl.class);
// Plugin implementation, set from the DSpace configuration by init().
protected AccessStatusHelper helper = null;
protected LocalDate forever_date = null;
protected String itemCalculationType = null;
protected String bitstreamCalculationType = null;
@Autowired(required = true)
protected ConfigurationService configurationService;
@Autowired(required = true)
protected PluginService pluginService;
/**
* Initialize the bean (after dependency injection has already taken place).
* Ensures the configurationService is injected, so that we can get the plugin
* and the forever embargo date threshold from the configuration.
* Called by "init-method" in Spring configuration.
*
* @throws Exception on generic exception
*/
public void init() throws Exception {
if (helper == null) {
helper = (AccessStatusHelper) pluginService.getSinglePlugin(AccessStatusHelper.class);
if (helper == null) {
throw new IllegalStateException("The AccessStatusHelper plugin was not defined in "
+ "DSpace configuration.");
}
// Defines the embargo forever date threshold for the access status.
// Look at EmbargoService.FOREVER for some improvements?
int year = configurationService.getIntProperty("access.status.embargo.forever.year");
int month = configurationService.getIntProperty("access.status.embargo.forever.month");
int day = configurationService.getIntProperty("access.status.embargo.forever.day");
forever_date = LocalDate.of(year, month, day)
.atStartOfDay()
.atZone(ZoneId.systemDefault())
.toLocalDate();
itemCalculationType = getAccessStatusCalculationType("access.status.for-user.item");
bitstreamCalculationType = getAccessStatusCalculationType("access.status.for-user.bitstream");
}
}
@Override
public AccessStatus getAccessStatus(Context context, Item item) throws SQLException {
return helper.getAccessStatusFromItem(context, item, forever_date, itemCalculationType);
}
@Override
public AccessStatus getAnonymousAccessStatus(Context context, Item item) throws SQLException {
return helper.getAnonymousAccessStatusFromItem(context, item, forever_date);
}
@Override
public AccessStatus getAccessStatus(Context context, Bitstream bitstream) throws SQLException {
return helper.getAccessStatusFromBitstream(context, bitstream, forever_date, bitstreamCalculationType);
}
private String getAccessStatusCalculationType(String key) {
String value = configurationService.getProperty(key, DefaultAccessStatusHelper.STATUS_FOR_ANONYMOUS);
if (!StringUtils.equalsIgnoreCase(value, DefaultAccessStatusHelper.STATUS_FOR_ANONYMOUS) &&
!StringUtils.equalsIgnoreCase(value, DefaultAccessStatusHelper.STATUS_FOR_CURRENT_USER)) {
log.warn("The configuration parameter \"" + key
+ "\" contains an invalid value. Valid values include: 'anonymous' and 'current'.");
value = DefaultAccessStatusHelper.STATUS_FOR_ANONYMOUS;
}
return value;
}
}

View File

@@ -1,312 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status;
import java.sql.SQLException;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.AccessStatus;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService;
/**
* Default plugin implementation of the access status helper.
*
* The methods provide simple logic to calculate the access status
* of an item based on the policies of the primary or the first bitstream
* in the original bundle. Users can override these methods for
* enhanced functionality.
*/
public class DefaultAccessStatusHelper implements AccessStatusHelper {
public static final String STATUS_FOR_CURRENT_USER = "current";
public static final String STATUS_FOR_ANONYMOUS = "anonymous";
public static final String EMBARGO = "embargo";
public static final String METADATA_ONLY = "metadata.only";
public static final String OPEN_ACCESS = "open.access";
public static final String RESTRICTED = "restricted";
public static final String UNKNOWN = "unknown";
protected ItemService itemService =
ContentServiceFactory.getInstance().getItemService();
protected ResourcePolicyService resourcePolicyService =
AuthorizeServiceFactory.getInstance().getResourcePolicyService();
protected AuthorizeService authorizeService =
AuthorizeServiceFactory.getInstance().getAuthorizeService();
protected GroupService groupService =
EPersonServiceFactory.getInstance().getGroupService();
public DefaultAccessStatusHelper() {
super();
}
/**
* Look at the item's primary or first bitstream policies to determine an access status value.
* It also considers a date threshold for embargoes and restrictions.
*
* If the item is null, simply returns the "unknown" value.
*
* @param context the DSpace context
* @param item the item to check for embargoes
* @param threshold the embargo threshold date
* @param type the type of calculation
* @return the access status
*/
@Override
public AccessStatus getAccessStatusFromItem(Context context, Item item, LocalDate threshold, String type)
throws SQLException {
if (item == null) {
return new AccessStatus(UNKNOWN, null);
}
Bitstream bitstream = getPrimaryOrFirstBitstreamInOriginalBundle(item);
if (bitstream == null) {
return new AccessStatus(METADATA_ONLY, null);
}
return getAccessStatusFromBitstream(context, bitstream, threshold, type);
}
/**
* Look at the bitstream policies to determine an access status value.
* It also considers a date threshold for embargoes and restrictions.
*
* If the bitstream is null, simply returns the "unknown" value.
*
* @param context the DSpace context
* @param bitstream the bitstream to check for embargoes
* @param threshold the embargo threshold date
* @param type the type of calculation
* @return the access status
*/
@Override
public AccessStatus getAccessStatusFromBitstream(Context context,
Bitstream bitstream, LocalDate threshold, String type) throws SQLException {
if (bitstream == null) {
return new AccessStatus(UNKNOWN, null);
}
List<ResourcePolicy> policies = getReadPolicies(context, bitstream, type);
LocalDate availabilityDate = findAvailabilityDate(policies, threshold);
// Get the access status based on the availability date
String accessStatus = getAccessStatusFromAvailabilityDate(availabilityDate, threshold);
return new AccessStatus(accessStatus, availabilityDate);
}
/**
* Look at the anonymous policies of the primary (or first)
* bitstream of the item to retrieve its embargo.
*
* @param context the DSpace context
* @param item the item
* @param threshold the embargo threshold date
* @return the access status
*/
@Override
public AccessStatus getAnonymousAccessStatusFromItem(Context context, Item item, LocalDate threshold)
throws SQLException {
return getAccessStatusFromItem(context, item, threshold, STATUS_FOR_ANONYMOUS);
}
/**
* Look in the item's original bundle. First, try to get the primary bitstream.
* If there is no primary bitstream, simply return the first one.
*
* @param item the DSpace item
* @return the bitstream
*/
private Bitstream getPrimaryOrFirstBitstreamInOriginalBundle(Item item) {
// Consider only the original bundles.
List<Bundle> bundles = item.getBundles(Constants.DEFAULT_BUNDLE_NAME);
// Check for primary bitstreams first.
Bitstream bitstream = bundles.stream()
.map(bundle -> bundle.getPrimaryBitstream())
.filter(Objects::nonNull)
.findFirst()
.orElse(null);
if (bitstream == null) {
// If there is no primary bitstream,
// take the first bitstream in the bundles.
bitstream = bundles.stream()
.map(bundle -> bundle.getBitstreams())
.flatMap(List::stream)
.findFirst()
.orElse(null);
}
return bitstream;
}
/**
* Retrieves the anonymous read policies for a DSpace object
*
* @param context the DSpace context
* @param dso the DSpace object
* @return a list of policies
*/
private List<ResourcePolicy> getAnonymousReadPolicies(Context context, DSpaceObject dso)
throws SQLException {
// Only consider read policies. Use the find variant without a group,
// as the group-based variant does not return all expected values
List<ResourcePolicy> readPolicies = resourcePolicyService.find(context, dso, Constants.READ);
// Filter the policies with the anonymous group
List<ResourcePolicy> filteredPolicies = readPolicies.stream()
.filter(p -> p.getGroup() != null && StringUtils.equals(p.getGroup().getName(), Group.ANONYMOUS))
.collect(Collectors.toList());
return filteredPolicies;
}
/**
* Retrieves the current user read policies for a DSpace object
*
* @param context the DSpace context
* @param dso the DSpace object
* @return a list of policies
*/
private List<ResourcePolicy> getCurrentUserReadPolicies(Context context, DSpaceObject dso)
throws SQLException {
// First, look if the current user can read the object
boolean canRead = authorizeService.authorizeActionBoolean(context, dso, Constants.READ);
// If so, it can't be an embargo or a restriction; short-circuit the process
// and return a null value (indicating open access)
if (canRead) {
return null;
}
// Only consider read policies
List<ResourcePolicy> policies = resourcePolicyService.find(context, dso, Constants.READ);
// Only calculate the embargo date for the current user
EPerson currentUser = context.getCurrentUser();
List<ResourcePolicy> readPolicies = new ArrayList<ResourcePolicy>();
for (ResourcePolicy policy : policies) {
EPerson eperson = policy.getEPerson();
if (eperson != null && currentUser != null && eperson.getID().equals(currentUser.getID())) {
readPolicies.add(policy);
continue;
}
Group group = policy.getGroup();
if (group != null && groupService.isMember(context, currentUser, group)) {
readPolicies.add(policy);
}
}
return readPolicies;
}
/**
* Retrieves the read policies for a DSpace object based on the type
*
* If the type is "current", consider the currently logged-in user.
* If the type is "anonymous", only consider the anonymous group.
*
* @param context the DSpace context
* @param dso the DSpace object
* @param type the type of calculation
* @return a list of policies
*/
private List<ResourcePolicy> getReadPolicies(Context context, DSpaceObject dso, String type)
throws SQLException {
if (StringUtils.equalsIgnoreCase(type, STATUS_FOR_CURRENT_USER)) {
return getCurrentUserReadPolicies(context, dso);
} else {
// Only calculate the status for the anonymous group read policies
return getAnonymousReadPolicies(context, dso);
}
}
/**
* Look at the read policies to retrieve the access status availability date.
*
* @param readPolicies the read policies
* @param threshold the embargo threshold date
* @return an availability date
*/
private LocalDate findAvailabilityDate(List<ResourcePolicy> readPolicies, LocalDate threshold) {
// If the list is null, the object is readable
if (readPolicies == null) {
return null;
}
// If there are no policies, return the threshold date (restriction)
if (readPolicies.size() == 0) {
return threshold;
}
LocalDate availabilityDate = null;
LocalDate currentDate = LocalDate.now();
boolean takeMostRecentDate = true;
// Looks at all read policies
for (ResourcePolicy policy : readPolicies) {
boolean isValid = resourcePolicyService.isDateValid(policy);
// If any policy is valid, the object is accessible
if (isValid) {
return null;
}
// There may be an active embargo
LocalDate startDate = policy.getStartDate();
// Ignore policy with no start date or which is expired
if (startDate == null || startDate.isBefore(currentDate)) {
continue;
}
// A policy with a start date at or past the threshold (a restriction)
// overrides the embargoes
if (!startDate.isBefore(threshold)) {
takeMostRecentDate = false;
}
// Take the nearest upcoming embargo date if there is no restriction; otherwise
// take the latest date (this accounts for rare cases where more than one
// resource policy exists)
if (availabilityDate == null) {
availabilityDate = startDate;
} else if (takeMostRecentDate) {
availabilityDate = startDate.isBefore(availabilityDate) ? startDate : availabilityDate;
} else {
availabilityDate = startDate.isAfter(availabilityDate) ? startDate : availabilityDate;
}
}
return availabilityDate;
}
/**
* Look at the DSpace object availability date to determine an access status value.
*
* If there is no availability date, returns the "open.access" value.
* If the availability date is on or after the embargo
* threshold date, returns the "restricted" value.
* Every other case returns the "embargo" value.
*
* @param availabilityDate the DSpace object availability date
* @param threshold the embargo threshold date
* @return an access status value
*/
private String getAccessStatusFromAvailabilityDate(LocalDate availabilityDate, LocalDate threshold) {
// If there is no availability date, it's open access.
if (availabilityDate == null) {
return OPEN_ACCESS;
}
// If the availability date is on or after the configured
// forever date, the access status is restricted.
if (!availabilityDate.isBefore(threshold)) {
return RESTRICTED;
}
return EMBARGO;
}
}
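Putting findAvailabilityDate and getAccessStatusFromAvailabilityDate together, the possible outcomes are as follows (dates are illustrative; the threshold is the configured forever date, e.g. 10000-01-01):
- the current user can read, or some read policy is currently valid: no availability date, status "open.access"
- no matching read policies at all: availability date = threshold, status "restricted"
- only future start dates, all before the threshold: the nearest upcoming date, status "embargo"
- at least one start date on or after the threshold: the latest start date, status "restricted"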

View File

@@ -1,25 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status.factory;
import org.dspace.access.status.service.AccessStatusService;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
* Abstract factory to get services for the access status package,
* use AccessStatusServiceFactory.getInstance() to retrieve an implementation.
*/
public abstract class AccessStatusServiceFactory {
public abstract AccessStatusService getAccessStatusService();
public static AccessStatusServiceFactory getInstance() {
return DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName("accessStatusServiceFactory", AccessStatusServiceFactory.class);
}
}

View File

@@ -1,26 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status.factory;
import org.dspace.access.status.service.AccessStatusService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Factory implementation to get services for the access status package,
* use AccessStatusServiceFactory.getInstance() to retrieve an implementation.
*/
public class AccessStatusServiceFactoryImpl extends AccessStatusServiceFactory {
@Autowired(required = true)
private AccessStatusService accessStatusService;
@Override
public AccessStatusService getAccessStatusService() {
return accessStatusService;
}
}

View File

@@ -1,30 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/**
* <p>
* Access status allows users to view bitstream availability before
* browsing into the item itself.
* </p>
* <p>
* The access status is calculated through a pluggable class:
* {@link org.dspace.access.status.AccessStatusHelper}.
* The {@link org.dspace.access.status.AccessStatusServiceImpl}
* must be configured to specify this class, as well as a forever embargo date
* threshold year, month and day.
* </p>
* <p>
* See {@link org.dspace.access.status.DefaultAccessStatusHelper} for a simple calculation
* based on the primary or the first bitstream of the original bundle. You can
* supply your own class to implement more complex access statuses.
* </p>
* <p>
* For now, the access status is calculated when the item is shown in a list.
* </p>
*/
package org.dspace.access.status;

View File

@@ -1,69 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status.service;
import java.sql.SQLException;
import org.dspace.content.AccessStatus;
import org.dspace.content.Bitstream;
import org.dspace.content.Item;
import org.dspace.core.Context;
/**
* Public interface to the access status subsystem.
* <p>
* Configuration properties: (with examples)
* {@code
* # values for the forever embargo date threshold
* # This threshold date is used in the default access status helper to determine if an item is
* # restricted or embargoed based on the start date of the primary (or first) file policies.
* # In this case, if the policy start date is before the threshold date, the status will
* # be embargo; otherwise it will be restricted.
* # You might want to change this threshold based on your needs. For example, some databases
* # don't accept dates later than 31 December 9999.
* access.status.embargo.forever.year = 10000
* access.status.embargo.forever.month = 1
* access.status.embargo.forever.day = 1
* # implementation of access status helper plugin - replace with local implementation if applicable
* # This default access status helper provides an item status based on the policies of the primary
* # bitstream (or first bitstream in the original bundles if no primary file is specified).
* plugin.single.org.dspace.access.status.AccessStatusHelper = org.dspace.access.status.DefaultAccessStatusHelper
* }
*/
public interface AccessStatusService {
/**
* Calculate the access status for an Item while considering the forever embargo date threshold.
*
* @param context the DSpace context
* @param item the item
* @return the access status
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
public AccessStatus getAccessStatus(Context context, Item item) throws SQLException;
/**
* Calculate the anonymous access status for an Item while considering the forever embargo date threshold.
*
* @param context the DSpace context
* @param item the item to check for embargo information
* @return the access status
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
public AccessStatus getAnonymousAccessStatus(Context context, Item item) throws SQLException;
/**
* Calculate the access status for a bitstream while considering the forever embargo date threshold.
*
* @param context the DSpace context
* @param bitstream the bitstream
* @return the access status
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
public AccessStatus getAccessStatus(Context context, Bitstream bitstream) throws SQLException;
}
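A typical call site, going through the factory shown above (a sketch only; assumes an existing Context named context and an Item named item):

AccessStatusService accessStatusService =
    AccessStatusServiceFactory.getInstance().getAccessStatusService();
AccessStatus status = accessStatusService.getAccessStatus(context, item);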

View File

@@ -9,59 +9,43 @@ package org.dspace.administer;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.cli.PosixParser;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Community; import org.dspace.content.Community;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CommunityService;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.handle.HandleManager;
import org.dspace.handle.service.HandleService; import org.dspace.storage.rdbms.DatabaseManager;
/** /**
* A command-line tool for setting/removing community/sub-community * A command-line tool for setting/removing community/sub-community
* relationships. Takes community DB Id or handle arguments as inputs. * relationships. Takes community DB Id or handle arguments as inputs.
* *
* @author rrodgers * @author rrodgers
* @version $Revision$ * @version $Revision$
*/ */
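For instance (handle values are illustrative): "CommunityFiliator -s -p 123456789/1 -c 123456789/2" makes the second community a sub-community of the first, and "CommunityFiliator -r" with the same -p and -c arguments removes that relationship again.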
public class CommunityFiliator { public class CommunityFiliator
{
protected CommunityService communityService; public static void main(String[] argv) throws Exception
protected HandleService handleService; {
public CommunityFiliator() {
communityService = ContentServiceFactory.getInstance().getCommunityService();
handleService = HandleServiceFactory.getInstance().getHandleService();
}
/**
* @param argv the command line arguments given
* @throws Exception if error
*/
public static void main(String[] argv) throws Exception {
// create an options object and populate it // create an options object and populate it
CommandLineParser parser = new DefaultParser(); CommandLineParser parser = new PosixParser();
Options options = new Options(); Options options = new Options();
options.addOption("s", "set", false, "set a parent/child relationship"); options.addOption("s", "set", false, "set a parent/child relationship");
options.addOption("r", "remove", false, options.addOption("r", "remove", false,
"remove a parent/child relationship"); "remove a parent/child relationship");
options.addOption("p", "parent", true, options.addOption("p", "parent", true,
"parent community (handle or database ID)"); "parent community (handle or database ID)");
options.addOption("c", "child", true, options.addOption("c", "child", true,
"child community (handle or databaseID)"); "child community (handle or databaseID)");
options.addOption("h", "help", false, "help"); options.addOption("h", "help", false, "help");
CommandLine line = parser.parse(options, argv); CommandLine line = parser.parse(options, argv);
@@ -70,48 +54,57 @@ public class CommunityFiliator {
String parentID = null; String parentID = null;
String childID = null; String childID = null;
if (line.hasOption('h')) { if (line.hasOption('h'))
{
HelpFormatter myhelp = new HelpFormatter(); HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("CommunityFiliator\n", options); myhelp.printHelp("CommunityFiliator\n", options);
System.out System.out
.println("\nestablish a relationship: CommunityFiliator -s -p parentID -c childID"); .println("\nestablish a relationship: CommunityFiliator -s -p parentID -c childID");
System.out System.out
.println("remove a relationship: CommunityFiliator -r -p parentID -c childID"); .println("remove a relationship: CommunityFiliator -r -p parentID -c childID");
System.exit(0); System.exit(0);
} }
if (line.hasOption('s')) { if (line.hasOption('s'))
{
command = "set"; command = "set";
} }
if (line.hasOption('r')) { if (line.hasOption('r'))
{
command = "remove"; command = "remove";
} }
if (line.hasOption('p')) { // parent if (line.hasOption('p')) // parent
{
parentID = line.getOptionValue('p'); parentID = line.getOptionValue('p');
} }
if (line.hasOption('c')) { // child if (line.hasOption('c')) // child
{
childID = line.getOptionValue('c'); childID = line.getOptionValue('c');
} }
// now validate // now validate
// must have a command set // must have a command set
if (command == null) { if (command == null)
{
System.out System.out
.println("Error - must run with either set or remove (run with -h flag for details)"); .println("Error - must run with either set or remove (run with -h flag for details)");
System.exit(1); System.exit(1);
} }
if ("set".equals(command) || "remove".equals(command)) { if ("set".equals(command) || "remove".equals(command))
if (parentID == null) { {
if (parentID == null)
{
System.out.println("Error - a parentID must be specified (run with -h flag for details)"); System.out.println("Error - a parentID must be specified (run with -h flag for details)");
System.exit(1); System.exit(1);
} }
if (childID == null) { if (childID == null)
{
System.out.println("Error - a childID must be specified (run with -h flag for details)"); System.out.println("Error - a childID must be specified (run with -h flag for details)");
System.exit(1); System.exit(1);
} }
@@ -120,135 +113,149 @@ public class CommunityFiliator {
CommunityFiliator filiator = new CommunityFiliator(); CommunityFiliator filiator = new CommunityFiliator();
Context c = new Context(); Context c = new Context();
// we are superuser! // we are superuser!
c.turnOffAuthorisationSystem(); c.setIgnoreAuthorization(true);
try { try
{
// validate and resolve the parent and child IDs into communities // validate and resolve the parent and child IDs into communities
Community parent = filiator.resolveCommunity(c, parentID); Community parent = filiator.resolveCommunity(c, parentID);
Community child = filiator.resolveCommunity(c, childID); Community child = filiator.resolveCommunity(c, childID);
if (parent == null) { if (parent == null)
{
System.out.println("Error, parent community cannot be found: " System.out.println("Error, parent community cannot be found: "
+ parentID); + parentID);
System.exit(1); System.exit(1);
} }
if (child == null) { if (child == null)
{
System.out.println("Error, child community cannot be found: " System.out.println("Error, child community cannot be found: "
+ childID); + childID);
System.exit(1); System.exit(1);
} }
if ("set".equals(command)) { if ("set".equals(command))
{
filiator.filiate(c, parent, child); filiator.filiate(c, parent, child);
} else { }
else
{
filiator.defiliate(c, parent, child); filiator.defiliate(c, parent, child);
} }
} catch (SQLException sqlE) { }
catch (SQLException sqlE)
{
System.out.println("Error - SQL exception: " + sqlE.toString()); System.out.println("Error - SQL exception: " + sqlE.toString());
} catch (AuthorizeException authE) { }
catch (AuthorizeException authE)
{
System.out.println("Error - Authorize exception: " System.out.println("Error - Authorize exception: "
+ authE.toString()); + authE.toString());
} catch (IOException ioE) { }
catch (IOException ioE)
{
System.out.println("Error - IO exception: " + ioE.toString()); System.out.println("Error - IO exception: " + ioE.toString());
} }
} }
/**
* @param c context
* @param parent parent Community
* @param child child community
* @throws SQLException if database error
* @throws AuthorizeException if authorize error
* @throws IOException if IO error
*/
public void filiate(Context c, Community parent, Community child) public void filiate(Context c, Community parent, Community child)
throws SQLException, AuthorizeException, IOException { throws SQLException, AuthorizeException, IOException
{
// check that a valid filiation would be established // check that a valid filiation would be established
// first test - proposed child must currently be an orphan (i.e. // first test - proposed child must currently be an orphan (i.e.
// top-level) // top-level)
Community childDad = CollectionUtils.isNotEmpty(child.getParentCommunities()) ? child.getParentCommunities() Community childDad = child.getParentCommunity();
.iterator().next() : null;
if (childDad != null) { if (childDad != null)
{
System.out.println("Error, child community: " + child.getID() System.out.println("Error, child community: " + child.getID()
+ " already a child of: " + childDad.getID()); + " already a child of: " + childDad.getID());
System.exit(1); System.exit(1);
} }
// second test - circularity: parent's parents can't include proposed // second test - circularity: parent's parents can't include proposed
// child // child
List<Community> parentDads = parent.getParentCommunities(); Community[] parentDads = parent.getAllParents();
if (parentDads.contains(child)) {
System.out.println("Error, circular parentage - child is parent of parent"); for (int i = 0; i < parentDads.length; i++)
System.exit(1); {
if (parentDads[i].getID() == child.getID())
{
System.out
.println("Error, circular parentage - child is parent of parent");
System.exit(1);
}
} }
// everything's OK // everything's OK
communityService.addSubcommunity(c, parent, child); parent.addSubcommunity(child);
// complete the pending transaction // complete the pending transaction
c.complete(); c.complete();
System.out.println("Filiation complete. Community: '" + parent.getID() System.out.println("Filiation complete. Community: '" + parent.getID()
+ "' is parent of community: '" + child.getID() + "'"); + "' is parent of community: '" + child.getID() + "'");
} }
/**
* @param c context
* @param parent parent Community
* @param child child community
* @throws SQLException if database error
* @throws AuthorizeException if authorize error
* @throws IOException if IO error
*/
public void defiliate(Context c, Community parent, Community child) public void defiliate(Context c, Community parent, Community child)
throws SQLException, AuthorizeException, IOException { throws SQLException, AuthorizeException, IOException
{
// verify that child is indeed a child of parent // verify that child is indeed a child of parent
List<Community> parentKids = parent.getSubcommunities(); Community[] parentKids = parent.getSubcommunities();
if (!parentKids.contains(child)) { boolean isChild = false;
System.out.println("Error, child community not a child of parent community");
for (int i = 0; i < parentKids.length; i++)
{
if (parentKids[i].getID() == child.getID())
{
isChild = true;
break;
}
}
if (!isChild)
{
System.out
.println("Error, child community not a child of parent community");
System.exit(1); System.exit(1);
} }
// OK remove the mappings - but leave the community, which will become // OK remove the mappings - but leave the community, which will become
// top-level // top-level
child.removeParentCommunity(parent); DatabaseManager.updateQuery(c,
parent.removeSubCommunity(child); "DELETE FROM community2community WHERE parent_comm_id= ? "+
communityService.update(c, child); "AND child_comm_id= ? ", parent.getID(), child.getID());
communityService.update(c, parent);
// complete the pending transaction // complete the pending transaction
c.complete(); c.complete();
System.out.println("Defiliation complete. Community: '" + child.getID() System.out.println("Defiliation complete. Community: '" + child.getID()
+ "' is no longer a child of community: '" + parent.getID() + "' is no longer a child of community: '" + parent.getID()
+ "'"); + "'");
} }
/** private Community resolveCommunity(Context c, String communityID)
* Find a community by ID throws SQLException
* {
* @param c context
* @param communityID community ID
* @return Community object
* @throws SQLException if database error
*/
protected Community resolveCommunity(Context c, String communityID)
throws SQLException {
Community community = null; Community community = null;
if (communityID.indexOf('/') != -1) { if (communityID.indexOf('/') != -1)
{
// has a / must be a handle // has a / must be a handle
community = (Community) handleService.resolveToObject(c, community = (Community) HandleManager.resolveToObject(c,
communityID); communityID);
// ensure it's a community // ensure it's a community
if ((community == null) if ((community == null)
|| (community.getType() != Constants.COMMUNITY)) { || (community.getType() != Constants.COMMUNITY))
{
community = null; community = null;
} }
} else { }
community = communityService.find(c, UUID.fromString(communityID)); else
{
community = Community.find(c, Integer.parseInt(communityID));
} }
return community; return community;

View File

@@ -7,25 +7,21 @@
*/ */
package org.dspace.administer; package org.dspace.administer;
import java.io.Console; import java.io.BufferedReader;
import java.util.Arrays; import java.io.InputStreamReader;
import java.util.Locale; import java.util.Locale;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang.StringUtils;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.I18nUtil; import org.dspace.core.I18nUtil;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group; import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.eperson.service.GroupService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
/** /**
* A command-line tool for creating an initial administrator for setting up a * A command-line tool for creating an initial administrator for setting up a
@@ -35,266 +31,238 @@ import org.dspace.services.factory.DSpaceServicesFactory;
* <P> * <P>
* Alternatively, it can be used to take the email, first name, last name and * Alternatively, it can be used to take the email, first name, last name and
* desired password as arguments thus: * desired password as arguments thus:
* *
* CreateAdministrator -e [email] -f [first name] -l [last name] -p [password] * CreateAdministrator -e [email] -f [first name] -l [last name] -p [password]
* *
* This is particularly convenient for automated deploy scripts that require an * This is particularly convenient for automated deploy scripts that require an
* initial administrator, for example, before deployment can be completed * initial administrator, for example, before deployment can be completed
* *
* @author Robert Tansley * @author Robert Tansley
* @author Richard Jones * @author Richard Jones
*
* @version $Revision$ * @version $Revision$
*/ */
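For instance (all values are illustrative): "CreateAdministrator -e admin@example.edu -f Jane -l Doe -c en -p ds4dmin" creates the administrator account non-interactively; note that all five options must be supplied for the non-interactive path to be taken.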
public final class CreateAdministrator { public final class CreateAdministrator
{
/** DSpace Context object */
private Context context;
/** /**
* DSpace Context object * For invoking via the command line. If called with no command line arguments,
*/
private final Context context;
protected EPersonService ePersonService;
protected GroupService groupService;
/**
* For invoking via the command line. If called with no command line arguments,
* it will negotiate with the user for the administrator details * it will negotiate with the user for the administrator details
* *
* @param argv the command line arguments given * @param argv
* @throws Exception if error * command-line arguments
*/ */
public static void main(String[] argv) public static void main(String[] argv)
throws Exception { throws Exception
CommandLineParser parser = new DefaultParser(); {
Options options = new Options(); CommandLineParser parser = new PosixParser();
Options options = new Options();
CreateAdministrator ca = new CreateAdministrator();
CreateAdministrator ca = new CreateAdministrator();
options.addOption("e", "email", true, "administrator email address");
options.addOption("f", "first", true, "administrator first name"); options.addOption("e", "email", true, "administrator email address");
options.addOption("h", "help", false, "explain create-administrator options"); options.addOption("f", "first", true, "administrator first name");
options.addOption("l", "last", true, "administrator last name"); options.addOption("l", "last", true, "administrator last name");
options.addOption("c", "language", true, "administrator language"); options.addOption("c", "language", true, "administrator language");
options.addOption("p", "password", true, "administrator password"); options.addOption("p", "password", true, "administrator password");
CommandLine line = null; CommandLine line = parser.parse(options, argv);
try { if (line.hasOption("e") && line.hasOption("f") && line.hasOption("l") &&
line.hasOption("c") && line.hasOption("p"))
line = parser.parse(options, argv); {
ca.createAdministrator(line.getOptionValue("e"),
} catch (Exception e) { line.getOptionValue("f"), line.getOptionValue("l"),
line.getOptionValue("c"), line.getOptionValue("p"));
System.out.println(e.getMessage() + "\nTry \"dspace create-administrator -h\" to print help information."); }
System.exit(1); else
{
} ca.negotiateAdministratorDetails();
}
if (line.hasOption("e") && line.hasOption("f") && line.hasOption("l") &&
line.hasOption("c") && line.hasOption("p")) {
ca.createAdministrator(line.getOptionValue("e"),
line.getOptionValue("f"), line.getOptionValue("l"),
line.getOptionValue("c"), line.getOptionValue("p"));
} else if (line.hasOption("h")) {
String header = "\nA command-line tool for creating an initial administrator for setting up a" +
" DSpace site. Unless all the required parameters are passed it will" +
" prompt for an e-mail address, last name, first name and password from" +
" standard input.. An administrator group is then created and the data passed" +
" in used to create an e-person in that group.\n\n";
String footer = "\n";
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp("dspace create-administrator", header, options, footer, true);
return;
} else {
ca.negotiateAdministratorDetails(line);
}
} }
/** /**
* constructor, which just creates an object with a ready context * constructor, which just creates an object with a ready context
* *
* @throws Exception if error * @throws Exception
*/ */
protected CreateAdministrator() private CreateAdministrator()
throws Exception { throws Exception
context = new Context(); {
try { context = new Context();
context.getDBConfig();
} catch (NullPointerException npr) {
// If the database is unavailable, there is no point in continuing. Previously a bare
// NullPointerException was thrown from here, which wasn't very helpful.
throw new IllegalStateException("Problem connecting to database. This " +
"indicates an issue with either the network or the version (or possibly something else). " +
"If you are running this in docker-compose, please make sure dspace-cli was " +
"built from the same sources as running dspace container AND that they are in " +
"the same project/network.");
}
groupService = EPersonServiceFactory.getInstance().getGroupService();
-        ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
     }

     /**
      * Method which will negotiate with the user via the command line to
      * obtain the administrator's details
      *
-     * @throws Exception if error
+     * @throws Exception
      */
-    protected void negotiateAdministratorDetails(CommandLine line)
-        throws Exception {
-        Console console = System.console();
-
-        System.out.println("Creating an initial administrator account");
-
-        String email = line.getOptionValue('e');
-        String firstName = line.getOptionValue('f');
-        String lastName = line.getOptionValue('l');
-        String language = I18nUtil.getDefaultLocale().getLanguage();
-        ConfigurationService cfg = DSpaceServicesFactory.getInstance().getConfigurationService();
-        boolean flag = line.hasOption('p');
-        char[] password = null;
-        boolean dataOK = line.hasOption('f') && line.hasOption('e') && line.hasOption('l');
-
-        while (!dataOK) {
-            System.out.print("E-mail address: ");
-            System.out.flush();
-
-            email = console.readLine();
-            if (!StringUtils.isBlank(email)) {
+    private void negotiateAdministratorDetails()
+        throws Exception
+    {
+        // For easier reading of typing
+        BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
+
+        System.out.println("Creating an initial administrator account");
+
+        boolean dataOK = false;
+
+        String email = null;
+        String firstName = null;
+        String lastName = null;
+        String password1 = null;
+        String password2 = null;
+        String language = I18nUtil.DEFAULTLOCALE.getLanguage();
+
+        while (!dataOK)
+        {
+            System.out.print("E-mail address: ");
+            System.out.flush();
+
+            email = input.readLine();
+
+            if (email != null)
+            {
                 email = email.trim();
-            } else {
-                System.out.println("Please provide an email address.");
-                continue;
             }

-            System.out.print("First name: ");
-            System.out.flush();
-            firstName = console.readLine();
-            if (firstName != null) {
+            System.out.print("First name: ");
+            System.out.flush();
+
+            firstName = input.readLine();
+
+            if (firstName != null)
+            {
                 firstName = firstName.trim();
             }

-            System.out.print("Last name: ");
-            System.out.flush();
-            lastName = console.readLine();
-            if (lastName != null) {
+            System.out.print("Last name: ");
+            System.out.flush();
+
+            lastName = input.readLine();
+
+            if (lastName != null)
+            {
                 lastName = lastName.trim();
             }

-            if (cfg.hasProperty("webui.supported.locales")) {
-                System.out.println("Select one of the following languages: "
-                    + cfg.getProperty("webui.supported.locales"));
+            if (ConfigurationManager.getProperty("webui.supported.locales") != null)
+            {
+                System.out.println("Select one of the following languages: " + ConfigurationManager.getProperty("webui.supported.locales"));
                 System.out.print("Language: ");
                 System.out.flush();

-                language = console.readLine();
-
-                if (language != null) {
+                language = input.readLine();
+
+                if (language != null)
+                {
                     language = language.trim();
                     language = I18nUtil.getSupportedLocale(new Locale(language)).getLanguage();
                 }
             }

-            System.out.print("Is the above data correct? (y or n): ");
-            System.out.flush();
-
-            String s = console.readLine();
-
+            System.out.println("WARNING: Password will appear on-screen.");
+            System.out.print("Password: ");
+            System.out.flush();
+
+            password1 = input.readLine();
+
+            if (password1 != null)
+            {
+                password1 = password1.trim();
+            }
+
+            System.out.print("Again to confirm: ");
+            System.out.flush();
+
+            password2 = input.readLine();
+
+            if (password2 != null)
+            {
+                password2 = password2.trim();
+            }
+
+            if (!StringUtils.isEmpty(password1) && StringUtils.equals(password1, password2))
+            {
+                // password OK
+                System.out.print("Is the above data correct? (y or n): ");
+                System.out.flush();
+
+                String s = input.readLine();
+
-            if (s != null) {
-                s = s.trim();
-                if (s.toLowerCase().startsWith("y")) {
-                    dataOK = true;
+                if (s != null)
+                {
+                    s = s.trim();
+                    if (s.toLowerCase().startsWith("y"))
+                    {
+                        dataOK = true;
+                    }
                 }
             }
-        }
-
-        if (!flag) {
-            password = getPassword(console);
-            if (password == null) {
-                return;
-            }
-        } else {
-            password = line.getOptionValue("p").toCharArray();
-        }
-        // if we make it to here, we are ready to create an administrator
-        createAdministrator(email, firstName, lastName, language, String.valueOf(password));
+            else
+            {
+                System.out.println("Passwords don't match");
+            }
+        }
+
+        // if we make it to here, we are ready to create an administrator
+        createAdministrator(email, firstName, lastName, language, password1);
     }

-    private char[] getPassword(Console console) {
-        char[] password1 = null;
-        char[] password2 = null;
-        System.out.println("Password will not display on screen.");
-        System.out.print("Password: ");
-        System.out.flush();
-        password1 = console.readPassword();
-        System.out.print("Again to confirm: ");
-        System.out.flush();
-        password2 = console.readPassword();
-        // TODO real password validation
-        if (password1.length > 1 && Arrays.equals(password1, password2)) {
-            // password OK
-            Arrays.fill(password2, ' ');
-            return password1;
-        } else {
-            System.out.println("Passwords don't match");
-            return null;
-        }
-    }
-
     /**
      * Create the administrator with the given details. If the user
      * already exists then they are simply upped to administrator status
      *
      * @param email the email for the user
      * @param first user's first name
      * @param last user's last name
-     * @param language preferred language
-     * @param pw desired password
-     * @throws Exception if error
+     * @param ps desired password
+     *
+     * @throws Exception
      */
-    protected void createAdministrator(String email, String first, String last,
-                                       String language, String pw)
-        throws Exception {
+    private void createAdministrator(String email, String first, String last,
+            String language, String pw)
+        throws Exception
+    {
         // Of course we aren't an administrator yet so we need to
         // circumvent authorisation
-        context.turnOffAuthorisationSystem();
+        context.setIgnoreAuthorization(true);

         // Find administrator group
-        Group admins = groupService.findByName(context, Group.ADMIN);
-
-        if (admins == null) {
+        Group admins = Group.find(context, 1);
+
+        if (admins == null)
+        {
             throw new IllegalStateException("Error, no admin group (group 1) found");
         }

         // Create the administrator e-person
-        EPerson eperson = ePersonService.findByEmail(context, email);
+        EPerson eperson = EPerson.findByEmail(context,email);

         // check if the email belongs to a registered user,
         // if not create a new user with this email
-        if (eperson == null) {
-            eperson = ePersonService.create(context);
+        if (eperson == null)
+        {
+            eperson = EPerson.create(context);
             eperson.setEmail(email);
             eperson.setCanLogIn(true);
             eperson.setRequireCertificate(false);
             eperson.setSelfRegistered(false);
         }

-        eperson.setLastName(context, last);
-        eperson.setFirstName(context, first);
-        eperson.setLanguage(context, language);
-        ePersonService.setPassword(eperson, pw);
-        ePersonService.update(context, eperson);
-
-        groupService.addMember(context, admins, eperson);
-        groupService.update(context, admins);
+        eperson.setLastName(last);
+        eperson.setFirstName(first);
+        eperson.setLanguage(language);
+        eperson.setPassword(pw);
+        eperson.update();
+
+        admins.addMember(eperson);
+        admins.update();

         context.complete();

         System.out.println("Administrator account created");
     }
 }
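
The removed getPassword() helper above relies on java.io.Console, so the password is never echoed, and the confirmation copy is wiped once the two entries match. A minimal standalone sketch of that same prompt-confirm-wipe pattern, using only JDK classes; the class name PasswordPromptSketch is ours, not part of DSpace:

import java.io.Console;
import java.util.Arrays;

public class PasswordPromptSketch {
    public static void main(String[] args) {
        Console console = System.console();
        if (console == null) {
            // Console is only available on a real terminal, not e.g. inside an IDE
            System.err.println("No console available");
            return;
        }
        char[] first = console.readPassword("Password: ");
        char[] second = console.readPassword("Again to confirm: ");
        // same acceptance test as the removed code: non-trivial length, entries equal
        if (first != null && second != null
                && first.length > 1 && Arrays.equals(first, second)) {
            Arrays.fill(second, ' ');   // wipe the duplicate copy
            System.out.println("Password accepted");
            Arrays.fill(first, ' ');    // wipe the original before exit
        } else {
            System.out.println("Passwords don't match");
        }
    }
}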

View File

@@ -0,0 +1,287 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
import java.io.IOException;
import java.sql.SQLException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.core.Context;
/**
* Class representing a particular Dublin Core metadata type, with various
* utility methods. In general, only used for manipulating the registry of
* Dublin Core types in the system, so most users will not need this.
*
* <p>
* The DCType implementation has been deprecated, please use MetadataManager,
* MetadataSchema and MetadataField instead. For backward compatibility this
* implementation has been updated to transparently call the new classes.
* </p>
*
* @author Robert Tansley
* @author Martin Hald
* @version $Revision$
* @deprecated
*/
public class DCType
{
/** Our context */
private Context ourContext;
/** The matching metadata field */
private MetadataField field = new MetadataField();
/**
* Create a DCType from an existing metadata field.
*
* @param context
* @param field
* @deprecated
*/
public DCType(Context context, MetadataField field)
{
this.ourContext = context;
this.field = field;
}
/**
* Default constructor.
*
* @param context
* @deprecated
*/
public DCType(Context context)
{
this.ourContext = context;
}
/**
* Utility method for quick access to an element and qualifier given the
* type ID.
*
* @param context
* context, in case DC types need to be read in from DB
* @param id
* the DC type ID
* @return a two-String array, string 0 is the element, string 1 is the
* qualifier
* @deprecated
*/
public static String[] quickFind(Context context, int id)
throws SQLException
{
MetadataField field = MetadataField.find(context, id);
String[] result = new String[2];
if (field == null)
{
return result;
}
else
{
result[0] = field.getElement();
result[1] = field.getQualifier();
return result;
}
}
/**
* Get a metadata field from the database.
*
* @param context
* DSpace context object
* @param id
* ID of the dublin core type
*
* @return the metadata field, or null if the ID is invalid.
* @deprecated
*/
public static DCType find(Context context, int id) throws SQLException
{
MetadataField field = MetadataField.find(context, id);
return new DCType(context, field);
}
/**
* Find a given Dublin Core type. Returns <code>null</code> if the Dublin
* Core type doesn't exist.
*
* @param context
* the DSpace context to use
* @param element
* the element to find
* @param qualifier
* the qualifier, or <code>null</code> to find an unqualified
* type
*
* @return the Dublin Core type, or <code>null</code> if there isn't a
* corresponding type in the registry
* @throws AuthorizeException
* @deprecated
*/
public static DCType findByElement(Context context, String element,
String qualifier) throws SQLException, AuthorizeException
{
MetadataField field = MetadataField.findByElement(context,
MetadataSchema.DC_SCHEMA_ID, element, qualifier);
if (field == null)
{
return null;
}
else
{
return new DCType(context, field);
}
}
/**
* Retrieve all Dublin Core types from the registry
*
* @return an array of all the Dublin Core types
* @deprecated
*/
public static DCType[] findAll(Context context) throws SQLException
{
MetadataField field[] = MetadataField.findAll(context);
DCType[] typeArray = new DCType[field.length];
for (int ii = 0; ii < field.length; ii++)
{
typeArray[ii] = new DCType(context, field[ii]);
}
// Return the array
return typeArray;
}
/**
* Create a new Dublin Core type
*
* @param context
* DSpace context object
* @return the newly created DCType
* @throws NonUniqueMetadataException
* @throws IOException
* @deprecated
*/
public static DCType create(Context context) throws SQLException,
AuthorizeException, IOException, NonUniqueMetadataException
{
MetadataField field = new MetadataField();
field.setSchemaID(MetadataSchema.DC_SCHEMA_ID);
field.create(context);
return new DCType(context, field);
}
/**
* Delete this DC type. This won't work if there are any DC values in the
* database of this type - they need to be updated first. An
* <code>SQLException</code> (referential integrity violation) will be
* thrown in this case.
* @deprecated
*/
public void delete() throws SQLException, AuthorizeException
{
field.delete(ourContext);
}
/**
* Get the internal identifier of this metadata field
*
* @return the internal identifier
*/
public int getID()
{
return field.getFieldID();
}
/**
* Get the DC element
*
* @return the element
*/
public String getElement()
{
return field.getElement();
}
/**
* Set the DC element
*
* @param s
* the new element
*/
public void setElement(String s)
{
field.setElement(s);
}
/**
* Get the DC qualifier, if any.
*
* @return the DC qualifier, or <code>null</code> if this is an
* unqualified element
*/
public String getQualifier()
{
return field.getQualifier();
}
/**
* Set the DC qualifier
*
* @param s
* the DC qualifier, or <code>null</code> if this is an
* unqualified element
*/
public void setQualifier(String s)
{
field.setQualifier(s);
}
/**
* Get the scope note - information about the DC type and its use
*
* @return the scope note
*/
public String getScopeNote()
{
return field.getScopeNote();
}
/**
* Set the scope note
*
* @param s
* the new scope note
*/
public void setScopeNote(String s)
{
field.setScopeNote(s);
}
/**
* Update the dublin core registry
*
* @throws IOException
* @throws NonUniqueMetadataException
* @deprecated
*/
public void update() throws SQLException, AuthorizeException,
NonUniqueMetadataException, IOException
{
field.update(ourContext);
}
}
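
As the deprecation note says, DCType is only a thin shim over MetadataField and MetadataSchema. A short sketch of going through the newer classes directly, using only the static API shown in the listing above; DCTypeMigrationSketch and describeDCField are illustrative names, and the Context is assumed to already exist:

import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.core.Context;

public class DCTypeMigrationSketch {
    // Equivalent of DCType.findByElement(...) without the shim: returns
    // "element.qualifier" for a registered Dublin Core field, or null.
    public static String describeDCField(Context context, String element, String qualifier)
            throws Exception {
        MetadataField field = MetadataField.findByElement(
                context, MetadataSchema.DC_SCHEMA_ID, element, qualifier);
        if (field == null) {
            return null;
        }
        return field.getQualifier() == null
                ? field.getElement()
                : field.getElement() + "." + field.getQualifier();
    }
}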

View File

@@ -10,250 +10,228 @@ package org.dspace.administer;
 import java.io.BufferedWriter;
 import java.io.FileWriter;
 import java.io.IOException;
-import java.io.Writer;
 import java.sql.SQLException;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;

 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.DefaultParser;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
+import org.apache.commons.cli.PosixParser;
+import org.apache.xml.serialize.Method;
+import org.apache.xml.serialize.OutputFormat;
+import org.apache.xml.serialize.XMLSerializer;
 import org.dspace.content.MetadataField;
 import org.dspace.content.MetadataSchema;
-import org.dspace.content.factory.ContentServiceFactory;
-import org.dspace.content.service.MetadataFieldService;
-import org.dspace.content.service.MetadataSchemaService;
 import org.dspace.core.Context;
-import org.w3c.dom.DOMConfiguration;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-import org.w3c.dom.bootstrap.DOMImplementationRegistry;
-import org.w3c.dom.ls.DOMImplementationLS;
-import org.w3c.dom.ls.LSOutput;
-import org.w3c.dom.ls.LSSerializer;
+import org.xml.sax.SAXException;

 /**
  * @author Graham Triggs
  *
- * This class creates an XML document as passed in the arguments and
+ * This class creates an xml document as passed in the arguments and
  * from the metadata schemas for the repository.
  *
  * The form of the XML is as follows
- * {@code
+ *
  * <metadata-schemas>
  *   <schema>
  *     <name>dc</name>
 *     <namespace>http://dublincore.org/documents/dcmi-terms/</namespace>
 *   </schema>
 * </metadata-schemas>
- * }
  */
-public class MetadataExporter {
-
-    protected static MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance()
-                                                                                        .getMetadataSchemaService();
-    protected static MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance()
-                                                                                      .getMetadataFieldService();
+public class MetadataExporter
+{
     /**
-     * Default constructor
-     */
-    private MetadataExporter() { }
-
-    /**
-     * @param args command line arguments
-     * @throws ParseException if parser error
-     * @throws IOException if IO error
-     * @throws SQLException if database error
-     * @throws RegistryExportException if export error
-     * @throws ClassNotFoundException if no suitable DOM implementation
-     * @throws InstantiationException if no suitable DOM implementation
-     * @throws IllegalAccessException if no suitable DOM implementation
-     */
-    public static void main(String[] args)
-        throws ParseException, SQLException, IOException, RegistryExportException,
-        ClassNotFoundException, InstantiationException, IllegalAccessException {
+     * @param args
+     * @throws ParseException
+     * @throws SAXException
+     * @throws IOException
+     * @throws SQLException
+     * @throws RegistryExportException
+     */
+    public static void main(String[] args) throws ParseException, SQLException, IOException, SAXException, RegistryExportException
+    {
         // create an options object and populate it
-        CommandLineParser parser = new DefaultParser();
+        CommandLineParser parser = new PosixParser();

         Options options = new Options();

         options.addOption("f", "file", true, "output xml file for registry");
         options.addOption("s", "schema", true, "the name of the schema to export");

         CommandLine line = parser.parse(options, args);

         String file = null;
         String schema = null;

-        if (line.hasOption('f')) {
-            file = line.getOptionValue('f');
-        } else {
+        if (line.hasOption('f'))
+        {
+            file = line.getOptionValue('f');
+        }
+        else
+        {
             usage();
             System.exit(0);
         }

-        if (line.hasOption('s')) {
+        if (line.hasOption('s'))
+        {
             schema = line.getOptionValue('s');
         }

         saveRegistry(file, schema);
     }

-    /**
-     * Save a registry to a file path
-     *
-     * @param file file path
-     * @param schema schema definition to save
-     * @throws SQLException if database error
-     * @throws IOException if IO error
-     * @throws RegistryExportException if export error
-     * @throws ClassNotFoundException if no suitable DOM implementation
-     * @throws InstantiationException if no suitable DOM implementation
-     * @throws IllegalAccessException if no suitable DOM implementation
-     */
-    public static void saveRegistry(String file, String schema)
-        throws SQLException, IOException, RegistryExportException,
-        ClassNotFoundException, InstantiationException, IllegalAccessException {
+    public static void saveRegistry(String file, String schema) throws SQLException, IOException, SAXException, RegistryExportException
+    {
         // create a context
         Context context = new Context();
-        context.turnOffAuthorisationSystem();
-
-        // Initialize an XML document.
-        Document document = DOMImplementationRegistry.newInstance()
-                                                     .getDOMImplementation("XML 3.0")
-                                                     .createDocument(null, "dspace-dc-types", null);
+        context.setIgnoreAuthorization(true);
+
+        OutputFormat xmlFormat = new OutputFormat(Method.XML, "UTF-8", true);
+        xmlFormat.setLineWidth(120);
+        xmlFormat.setIndent(4);
+
+        XMLSerializer xmlSerializer = new XMLSerializer(new BufferedWriter(new FileWriter(file)), xmlFormat);
+        // XMLSerializer xmlSerializer = new XMLSerializer(System.out, xmlFormat);
+        xmlSerializer.startDocument();
+        xmlSerializer.startElement("dspace-dc-types", null);

         // Save the schema definition(s)
-        saveSchema(context, document, schema);
-
-        List<MetadataField> mdFields = null;
+        saveSchema(context, xmlSerializer, schema);
+
+        MetadataField[] mdFields = null;

         // If a single schema has been specified
-        if (schema != null && !"".equals(schema)) {
+        if (schema != null && !"".equals(schema))
+        {
             // Get the id of that schema
-            MetadataSchema mdSchema = metadataSchemaService.find(context, schema);
-            if (mdSchema == null) {
+            MetadataSchema mdSchema = MetadataSchema.find(context, schema);
+            if (mdSchema == null)
+            {
                 throw new RegistryExportException("no schema to export");
             }

             // Get the metadata fields only for the specified schema
-            mdFields = metadataFieldService.findAllInSchema(context, mdSchema);
-        } else {
+            mdFields = MetadataField.findAllInSchema(context, mdSchema.getSchemaID());
+        }
+        else
+        {
             // Get the metadata fields for all the schemas
-            mdFields = metadataFieldService.findAll(context);
+            mdFields = MetadataField.findAll(context);
         }

-        // Compose the metadata fields
-        for (MetadataField mdField : mdFields) {
-            saveType(context, document, mdField);
+        // Output the metadata fields
+        for (MetadataField mdField : mdFields)
+        {
+            saveType(context, xmlSerializer, mdField);
         }

-        // Serialize the completed document to the output file.
-        try (Writer writer = new BufferedWriter(new FileWriter(file))) {
-            DOMImplementationLS lsImplementation
-                    = (DOMImplementationLS) DOMImplementationRegistry.newInstance()
-                                                                     .getDOMImplementation("LS");
-            LSSerializer serializer = lsImplementation.createLSSerializer();
-            DOMConfiguration configuration = serializer.getDomConfig();
-            configuration.setParameter("format-pretty-print", true);
-            LSOutput lsOutput = lsImplementation.createLSOutput();
-            lsOutput.setEncoding("UTF-8");
-            lsOutput.setCharacterStream(writer);
-            serializer.write(document, lsOutput);
-        }
+        xmlSerializer.endElement("dspace-dc-types");
+        xmlSerializer.endDocument();

         // abort the context, as we shouldn't have changed it!!
         context.abort();
     }

     /**
-     * Compose the schema registry. If the parameter 'schema' is null or empty, save all schemas.
-     *
-     * @param context DSpace Context
-     * @param document the document being built
-     * @param schema schema (may be null to save all)
-     * @throws SQLException if database error
-     * @throws RegistryExportException if export error
+     * Serialize the schema registry. If the parameter 'schema' is null or empty, save all schemas
+     * @param context
+     * @param xmlSerializer
+     * @param schema
+     * @throws SQLException
+     * @throws SAXException
+     * @throws RegistryExportException
      */
-    public static void saveSchema(Context context, Document document, String schema)
-        throws SQLException, RegistryExportException {
-        if (schema != null && !"".equals(schema)) {
+    public static void saveSchema(Context context, XMLSerializer xmlSerializer, String schema) throws SQLException, SAXException, RegistryExportException
+    {
+        if (schema != null && !"".equals(schema))
+        {
             // Find a single named schema
-            MetadataSchema mdSchema = metadataSchemaService.find(context, schema);
-
-            saveSchema(document, mdSchema);
-        } else {
+            MetadataSchema mdSchema = MetadataSchema.find(context, schema);
+
+            saveSchema(xmlSerializer, mdSchema);
+        }
+        else
+        {
             // Find all schemas
-            List<MetadataSchema> mdSchemas = metadataSchemaService.findAll(context);
-
-            for (MetadataSchema mdSchema : mdSchemas) {
-                saveSchema(document, mdSchema);
+            MetadataSchema[] mdSchemas = MetadataSchema.findAll(context);
+
+            for (MetadataSchema mdSchema : mdSchemas)
+            {
+                saveSchema(xmlSerializer, mdSchema);
             }
         }
     }

     /**
-     * Compose a single schema (namespace) registry entry
+     * Serialize a single schema (namespace) registry entry
      *
-     * @param document the output document being built.
-     * @param mdSchema DSpace metadata schema
-     * @throws RegistryExportException if export error
+     * @param xmlSerializer
+     * @param mdSchema
+     * @throws SAXException
+     * @throws RegistryExportException
      */
-    private static void saveSchema(Document document, MetadataSchema mdSchema)
-        throws RegistryExportException {
+    private static void saveSchema(XMLSerializer xmlSerializer, MetadataSchema mdSchema) throws SAXException, RegistryExportException
+    {
         // If we haven't got a schema, it's an error
-        if (mdSchema == null) {
+        if (mdSchema == null)
+        {
             throw new RegistryExportException("no schema to export");
         }

         String name = mdSchema.getName();
         String namespace = mdSchema.getNamespace();

-        if (name == null || "".equals(name)) {
+        if (name == null || "".equals(name))
+        {
             System.out.println("name is null, skipping");
             return;
         }

-        if (namespace == null || "".equals(namespace)) {
+        if (namespace == null || "".equals(namespace))
+        {
             System.out.println("namespace is null, skipping");
             return;
         }

-        Element document_element = document.getDocumentElement();
-
-        // Compose the parent tag
-        Element schema_element = document.createElement("dc-schema");
-        document_element.appendChild(schema_element);
-
-        // Compose the schema name
-        Element name_element = document.createElement("name");
-        schema_element.appendChild(name_element);
-        name_element.setTextContent(name);
-
-        // Compose the schema namespace
-        Element namespace_element = document.createElement("namespace");
-        schema_element.appendChild(namespace_element);
-        namespace_element.setTextContent(namespace);
+        // Output the parent tag
+        xmlSerializer.startElement("dc-schema", null);
+
+        // Output the schema name
+        xmlSerializer.startElement("name", null);
+        xmlSerializer.characters(name.toCharArray(), 0, name.length());
+        xmlSerializer.endElement("name");
+
+        // Output the schema namespace
+        xmlSerializer.startElement("namespace", null);
+        xmlSerializer.characters(namespace.toCharArray(), 0, namespace.length());
+        xmlSerializer.endElement("namespace");
+
+        xmlSerializer.endElement("dc-schema");
     }

     /**
-     * Compose a single metadata field registry entry to XML.
+     * Serialize a single metadata field registry entry to xml
      *
-     * @param context DSpace context
-     * @param document the output document being built.
-     * @param mdField DSpace metadata field
-     * @throws RegistryExportException if export error
-     * @throws SQLException if database error
+     * @param context
+     * @param xmlSerializer
+     * @param mdField
+     * @throws SAXException
+     * @throws RegistryExportException
+     * @throws SQLException
+     * @throws IOException
      */
-    private static void saveType(Context context, Document document, MetadataField mdField)
-        throws RegistryExportException, SQLException {
+    private static void saveType(Context context, XMLSerializer xmlSerializer, MetadataField mdField) throws SAXException, RegistryExportException, SQLException, IOException
+    {
         // If we haven't been given a field, it's an error
-        if (mdField == null) {
+        if (mdField == null)
+        {
             throw new RegistryExportException("no field to export");
         }

         // Get the data from the metadata field
         String schemaName = getSchemaName(context, mdField);
         String element = mdField.getElement();
@@ -261,83 +239,87 @@ public class MetadataExporter {
         String scopeNote = mdField.getScopeNote();

         // We must have a schema and element
-        if (schemaName == null || element == null) {
+        if (schemaName == null || element == null)
+        {
             throw new RegistryExportException("incomplete field information");
         }

-        Element document_element = document.getDocumentElement();
-
-        // Compose the parent tag
-        Element dc_type = document.createElement("dc-type");
-        document_element.appendChild(dc_type);
-
-        // Compose the schema name
-        Element schema_element = document.createElement("schema");
-        dc_type.appendChild(schema_element);
-        schema_element.setTextContent(schemaName);
-
-        // Compose the element
-        Element element_element = document.createElement("element");
-        dc_type.appendChild(element_element);
-        element_element.setTextContent(element);
-
-        // Compose the qualifier, if present
-        if (qualifier != null) {
-            Element qualifier_element = document.createElement("qualifier");
-            dc_type.appendChild(qualifier_element);
-            qualifier_element.setTextContent(qualifier);
-        } else {
-            dc_type.appendChild(document.createComment("unqualified"));
-        }
-
-        // Compose the scope note, if present
-        if (scopeNote != null) {
-            Element scope_element = document.createElement("scope_note");
-            dc_type.appendChild(scope_element);
-            scope_element.setTextContent(scopeNote);
-        } else {
-            dc_type.appendChild(document.createComment("no scope note"));
-        }
+        // Output the parent tag
+        xmlSerializer.startElement("dc-type", null);
+
+        // Output the schema name
+        xmlSerializer.startElement("schema", null);
+        xmlSerializer.characters(schemaName.toCharArray(), 0, schemaName.length());
+        xmlSerializer.endElement("schema");
+
+        // Output the element
+        xmlSerializer.startElement("element", null);
+        xmlSerializer.characters(element.toCharArray(), 0, element.length());
+        xmlSerializer.endElement("element");
+
+        // Output the qualifier, if present
+        if (qualifier != null)
+        {
+            xmlSerializer.startElement("qualifier", null);
+            xmlSerializer.characters(qualifier.toCharArray(), 0, qualifier.length());
+            xmlSerializer.endElement("qualifier");
+        }
+        else
+        {
+            xmlSerializer.comment("unqualified");
+        }
+
+        // Output the scope note, if present
+        if (scopeNote != null)
+        {
+            xmlSerializer.startElement("scope_note", null);
+            xmlSerializer.characters(scopeNote.toCharArray(), 0, scopeNote.length());
+            xmlSerializer.endElement("scope_note");
+        }
+        else
+        {
+            xmlSerializer.comment("no scope note");
+        }
+
+        xmlSerializer.endElement("dc-type");
     }

-    static Map<Integer, String> schemaMap = new HashMap<Integer, String>();
-
     /**
      * Helper method to retrieve a schema name for the field.
      * Caches the name after looking up the id.
-     *
-     * @param context DSpace Context
-     * @param mdField DSpace metadata field
-     * @return name of schema
-     * @throws SQLException if database error
-     * @throws RegistryExportException if export error
      */
-    private static String getSchemaName(Context context, MetadataField mdField)
-        throws SQLException, RegistryExportException {
+    static Map<Integer, String> schemaMap = new HashMap<Integer, String>();
+    private static String getSchemaName(Context context, MetadataField mdField) throws SQLException, RegistryExportException
+    {
         // Get name from cache
-        String name = schemaMap.get(mdField.getMetadataSchema().getID());
-
-        if (name == null) {
+        String name = schemaMap.get(Integer.valueOf(mdField.getSchemaID()));
+
+        if (name == null)
+        {
             // Name not retrieved before, so get the schema now
-            MetadataSchema mdSchema = metadataSchemaService.find(context, mdField.getMetadataSchema().getID());
-            if (mdSchema != null) {
+            MetadataSchema mdSchema = MetadataSchema.find(context, mdField.getSchemaID());
+            if (mdSchema != null)
+            {
                 name = mdSchema.getName();
-                schemaMap.put(mdSchema.getID(), name);
-            } else {
+                schemaMap.put(Integer.valueOf(mdSchema.getSchemaID()), name);
+            }
+            else
+            {
                 // Can't find the schema
                 throw new RegistryExportException("Can't get schema name for field");
             }
         }

         return name;
     }

     /**
-     * Print the usage message to standard output
+     * Print the usage message to stdout
      */
-    public static void usage() {
+    public static void usage()
+    {
         String usage = "Use this class with the following options:\n" +
             " -f <xml output file> : specify the output file for the schemas\n" +
             " -s <schema> : name of the schema to export\n";
         System.out.println(usage);
     }
 }
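
Both versions of the exporter keep the same public entry point, so a programmatic export (rather than the main() driver) looks the same either way. A hypothetical driver, equivalent to running the class with -f registry.xml -s dc; the file name is ours, and the broad throws clause is there because the two versions declare different checked exceptions:

public class ExportRegistrySketch {
    public static void main(String[] args) throws Exception {
        // passing null (or "") as the schema would export every registered schema;
        // "dc" restricts the dump to the Dublin Core schema
        MetadataExporter.saveRegistry("registry.xml", "dc");
    }
}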

View File

@@ -9,243 +9,210 @@ package org.dspace.administer;
 import java.io.IOException;
 import java.sql.SQLException;

 import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.transform.TransformerException;
-import javax.xml.xpath.XPath;
-import javax.xml.xpath.XPathConstants;
-import javax.xml.xpath.XPathExpressionException;
-import javax.xml.xpath.XPathFactory;

 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.DefaultParser;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
+import org.apache.commons.cli.PosixParser;
+import org.apache.xpath.XPathAPI;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.content.MetadataField;
 import org.dspace.content.MetadataSchema;
-import org.dspace.content.MetadataSchemaEnum;
 import org.dspace.content.NonUniqueMetadataException;
-import org.dspace.content.factory.ContentServiceFactory;
-import org.dspace.content.service.MetadataFieldService;
-import org.dspace.content.service.MetadataSchemaService;
 import org.dspace.core.Context;
 import org.w3c.dom.Document;
 import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 import org.xml.sax.SAXException;

 /**
  * @author Richard Jones
  *
- * This class takes an XML document as passed in the arguments and
+ * This class takes an xml document as passed in the arguments and
  * uses it to create metadata elements in the Metadata Registry if
- * they do not already exist.
+ * they do not already exist
  *
  * The format of the XML file is as follows:
  *
- * {@code
  * <dspace-dc-types>
  *   <dc-type>
  *     <schema>icadmin</schema>
 *     <element>status</element>
 *     <qualifier>dateset</qualifier>
 *     <scope_note>the workflow status of an item</scope_note>
 *   </dc-type>
 *
 *   [....]
 *
 * </dspace-dc-types>
- * }
  */
-public class MetadataImporter {
-    protected static MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance()
-                                                                                        .getMetadataSchemaService();
-    protected static MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance()
-                                                                                      .getMetadataFieldService();
-
-    /**
-     * logging category
-     */
-    private static final Logger log = LogManager.getLogger();
-
-    /**
-     * Default constructor
-     */
-    private MetadataImporter() { }
-
-    /**
-     * main method for reading user input from the command line
-     *
-     * @param args the command line arguments given
-     * @throws ParseException if parse error
-     * @throws SQLException if database error
-     * @throws IOException if IO error
-     * @throws TransformerException if transformer error
-     * @throws ParserConfigurationException if configuration error
-     * @throws AuthorizeException if authorization error
-     * @throws SAXException if parser error
-     * @throws NonUniqueMetadataException if duplicate metadata
-     * @throws RegistryImportException if import fails
-     * @throws XPathExpressionException passed through
-     **/
+public class MetadataImporter
+{
+    /**
+     * main method for reading user input from the command line
+     */
     public static void main(String[] args)
         throws ParseException, SQLException, IOException, TransformerException,
         ParserConfigurationException, AuthorizeException, SAXException,
-        NonUniqueMetadataException, RegistryImportException, XPathExpressionException {
+        NonUniqueMetadataException, RegistryImportException
+    {
+        boolean forceUpdate = false;
+
         // create an options object and populate it
-        CommandLineParser parser = new DefaultParser();
+        CommandLineParser parser = new PosixParser();

         Options options = new Options();

         options.addOption("f", "file", true, "source xml file for DC fields");
         options.addOption("u", "update", false, "update an existing schema");

         CommandLine line = parser.parse(options, args);

-        if (line.hasOption('f')) {
-            String file = line.getOptionValue('f');
-            boolean forceUpdate = line.hasOption('u');
-            loadRegistry(file, forceUpdate);
-        } else {
-            usage();
-            System.exit(1);
-        }
-    }
+        String file = null;
+        if (line.hasOption('f'))
+        {
+            file = line.getOptionValue('f');
+        }
+        else
+        {
+            usage();
+            System.exit(0);
+        }
+
+        forceUpdate = line.hasOption('u');
+        loadRegistry(file, forceUpdate);
+    }

     /**
      * Load the data from the specified file path into the database
      *
      * @param file the file path containing the source data
-     * @param forceUpdate whether to force update
-     * @throws SQLException if database error
-     * @throws IOException if IO error
-     * @throws TransformerException if transformer error
-     * @throws ParserConfigurationException if configuration error
-     * @throws AuthorizeException if authorization error
-     * @throws SAXException if parser error
-     * @throws NonUniqueMetadataException if duplicate metadata
-     * @throws RegistryImportException if import fails
-     * @throws XPathExpressionException passed through
      */
     public static void loadRegistry(String file, boolean forceUpdate)
-        throws SQLException, IOException, TransformerException, ParserConfigurationException, AuthorizeException,
-        SAXException, NonUniqueMetadataException, RegistryImportException, XPathExpressionException {
-        Context context = null;
-
-        try {
-            // create a context
-            context = new Context();
-            context.turnOffAuthorisationSystem();
-
-            // read the XML
-            Document document = RegistryImporter.loadXML(file);
-
-            // Get the nodes corresponding to types
-            XPath xPath = XPathFactory.newInstance().newXPath();
-            NodeList schemaNodes = (NodeList) xPath.compile("/dspace-dc-types/dc-schema")
-                                                   .evaluate(document, XPathConstants.NODESET);
-
-            // Add each one as a new format to the registry
-            for (int i = 0; i < schemaNodes.getLength(); i++) {
-                Node n = schemaNodes.item(i);
-                loadSchema(context, n, forceUpdate);
-            }
-
-            // Get the nodes corresponding to types
-            NodeList typeNodes = (NodeList) xPath.compile("/dspace-dc-types/dc-type")
-                                                 .evaluate(document, XPathConstants.NODESET);
-
-            // Add each one as a new format to the registry
-            for (int i = 0; i < typeNodes.getLength(); i++) {
-                Node n = typeNodes.item(i);
-                loadType(context, n);
-            }
-
-            context.restoreAuthSystemState();
-            context.complete();
-        } finally {
-            // Clean up our context, if it still exists & it was never completed
-            if (context != null && context.isValid()) {
-                context.abort();
-            }
-        }
-    }
+        throws SQLException, IOException, TransformerException, ParserConfigurationException,
+        AuthorizeException, SAXException, NonUniqueMetadataException, RegistryImportException
+    {
+        // create a context
+        Context context = new Context();
+        context.setIgnoreAuthorization(true);
+
+        // read the XML
+        Document document = RegistryImporter.loadXML(file);
+
+        // Get the nodes corresponding to types
+        NodeList schemaNodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-schema");
+
+        // Add each one as a new format to the registry
+        for (int i = 0; i < schemaNodes.getLength(); i++)
+        {
+            Node n = schemaNodes.item(i);
+            loadSchema(context, n, forceUpdate);
+        }
+
+        // Get the nodes corresponding to types
+        NodeList typeNodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-type");
+
+        // Add each one as a new format to the registry
+        for (int i = 0; i < typeNodes.getLength(); i++)
+        {
+            Node n = typeNodes.item(i);
+            loadType(context, n);
+        }
+
+        context.complete();
+    }

     /**
      * Process a node in the metadata registry XML file. If the
      * schema already exists, it will not be recreated
      *
-     * @param context DSpace context object
-     * @param node the node in the DOM tree
-     * @throws SQLException if database error
-     * @throws IOException if IO error
-     * @throws TransformerException if transformer error
-     * @throws AuthorizeException if authorization error
-     * @throws NonUniqueMetadataException if duplicate metadata
-     * @throws RegistryImportException if import fails
+     * @param context
+     *            DSpace context object
+     * @param node
+     *            the node in the DOM tree
+     * @throws NonUniqueMetadataException
      */
     private static void loadSchema(Context context, Node node, boolean updateExisting)
-        throws SQLException, AuthorizeException, NonUniqueMetadataException, RegistryImportException,
-        XPathExpressionException {
+        throws SQLException, IOException, TransformerException,
+        AuthorizeException, NonUniqueMetadataException, RegistryImportException
+    {
         // Get the values
         String name = RegistryImporter.getElementData(node, "name");
         String namespace = RegistryImporter.getElementData(node, "namespace");

-        if (name == null || "".equals(name)) {
+        if (name == null || "".equals(name))
+        {
             throw new RegistryImportException("Name of schema must be supplied");
         }

-        if (namespace == null || "".equals(namespace)) {
+        if (namespace == null || "".equals(namespace))
+        {
             throw new RegistryImportException("Namespace of schema must be supplied");
         }

+        System.out.print("Registering Schema: " + name + " - " + namespace + " ... ");
+
         // check to see if the schema already exists
-        MetadataSchema s = metadataSchemaService.find(context, name);
-
-        if (s == null) {
+        MetadataSchema s = MetadataSchema.find(context, name);
+
+        if (s == null)
+        {
             // Schema does not exist - create
-            log.info("Registering Schema {}({})", name, namespace);
-            metadataSchemaService.create(context, name, namespace);
-        } else {
+            MetadataSchema schema = new MetadataSchema(namespace, name);
+            schema.create(context);
+            System.out.println("created");
+        }
+        else
+        {
             // Schema exists - if it's the same namespace, allow the type imports to continue
-            if (s.getNamespace().equals(namespace)) {
-                // This schema already exists with this namespace, skipping it
+            if (s.getNamespace().equals(namespace))
+            {
+                System.out.println("already exists, skipping to type import");
                 return;
             }

             // It's a different namespace - have we been told to update?
-            if (updateExisting) {
+            if (updateExisting)
+            {
                 // Update the existing schema namespace and continue to type import
-                log.info("Updating Schema {}: New namespace {}", name, namespace);
                 s.setNamespace(namespace);
-                metadataSchemaService.update(context, s);
-            } else {
-                throw new RegistryImportException(
-                    "Schema " + name + " already registered with different namespace " + namespace + ". Rerun with " +
-                        "'update' option enabled if you wish to update this schema.");
+                s.update(context);
+                System.out.println("namespace updated (" + name + " = " + namespace + ")");
+            }
+            else
+            {
+                // Don't update the existing namespace - abort abort abort
+                System.out.println("schema exists, but with different namespace");
+                System.out.println("was: " + s.getNamespace());
+                System.out.println("xml: " + namespace);
+                System.out.println("aborting - use -u to force the update");
+                throw new RegistryImportException("schema already registered with different namespace - use -u to update");
             }
         }
     }

     /**
      * Process a node in the metadata registry XML file. The node must
      * be a "dc-type" node. If the type already exists, then it
-     * will not be re-imported.
+     * will not be reimported
      *
-     * @param context DSpace context object
-     * @param node the node in the DOM tree
-     * @throws SQLException if database error
-     * @throws IOException if IO error
-     * @throws TransformerException if transformer error
-     * @throws AuthorizeException if authorization error
-     * @throws NonUniqueMetadataException if duplicate metadata
-     * @throws RegistryImportException if import fails
+     * @param context
+     *            DSpace context object
+     * @param node
+     *            the node in the DOM tree
+     * @throws NonUniqueMetadataException
      */
     private static void loadType(Context context, Node node)
-        throws SQLException, IOException, AuthorizeException, NonUniqueMetadataException, RegistryImportException,
-        XPathExpressionException {
+        throws SQLException, IOException, TransformerException,
+        AuthorizeException, NonUniqueMetadataException, RegistryImportException
+    {
         // Get the values
         String schema = RegistryImporter.getElementData(node, "schema");
         String element = RegistryImporter.getElementData(node, "element");
@@ -253,41 +220,45 @@ public class MetadataImporter {
         String scopeNote = RegistryImporter.getElementData(node, "scope_note");

         // If the schema is not provided default to DC
-        if (schema == null) {
-            schema = MetadataSchemaEnum.DC.getName();
+        if (schema == null)
+        {
+            schema = MetadataSchema.DC_SCHEMA;
         }

+        System.out.print("Registering Metadata: " + schema + "." + element + "." + qualifier + " ... ");
+
         // Find the matching schema object
-        MetadataSchema schemaObj = metadataSchemaService.find(context, schema);
-
-        if (schemaObj == null) {
-            throw new RegistryImportException("Schema '" + schema + "' is not registered and does not exist.");
+        MetadataSchema schemaObj = MetadataSchema.find(context, schema);
+
+        if (schemaObj == null)
+        {
+            throw new RegistryImportException("Schema '" + schema + "' is not registered");
         }

-        MetadataField mf = metadataFieldService.findByElement(context, schemaObj, element, qualifier);
-        if (mf != null) {
-            // Metadata field already exists, skipping it
+        MetadataField mf = MetadataField.findByElement(context, schemaObj.getSchemaID(), element, qualifier);
+        if (mf != null)
+        {
+            System.out.println("already exists, skipping");
             return;
         }

-        // Actually create this metadata field as it doesn't yet exist
-        String fieldName = schema + "." + element + "." + qualifier;
-        if (qualifier == null) {
-            fieldName = schema + "." + element;
-        }
-        log.info("Registering metadata field {}", fieldName);
-        MetadataField field = metadataFieldService.create(context, schemaObj, element, qualifier, scopeNote);
-        metadataFieldService.update(context, field);
+        MetadataField field = new MetadataField();
+        field.setSchemaID(schemaObj.getSchemaID());
+        field.setElement(element);
+        field.setQualifier(qualifier);
+        field.setScopeNote(scopeNote);
+        field.create(context);
+        System.out.println("created");
     }

     /**
      * Print the usage message to stdout
      */
-    public static void usage() {
+    public static void usage()
+    {
         String usage = "Use this class with the following option:\n" +
             " -f <xml source file> : specify which xml source file " +
             "contains the DC fields to import.\n";
         System.out.println(usage);
     }
 }
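
A hypothetical round trip through the importer: write a minimal registry file in the format documented in the class javadoc (one schema plus one field), then load it. The file name and the icadmin namespace URI are made up for the example; passing false leaves forceUpdate off, so a clash with an existing schema namespace aborts instead of updating:

import java.io.FileWriter;

public class ImportRegistrySketch {
    public static void main(String[] args) throws Exception {
        String xml =
            "<dspace-dc-types>\n"
            + "  <dc-schema>\n"
            + "    <name>icadmin</name>\n"
            + "    <namespace>http://example.org/icadmin/</namespace>\n"
            + "  </dc-schema>\n"
            + "  <dc-type>\n"
            + "    <schema>icadmin</schema>\n"
            + "    <element>status</element>\n"
            + "    <qualifier>dateset</qualifier>\n"
            + "    <scope_note>the workflow status of an item</scope_note>\n"
            + "  </dc-type>\n"
            + "</dspace-dc-types>\n";
        try (FileWriter out = new FileWriter("icadmin-registry.xml")) {
            out.write(xml);
        }
        MetadataImporter.loadRegistry("icadmin-registry.xml", false);
    }
}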

View File

@@ -1,140 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
import java.io.IOException;
import java.sql.SQLException;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.cli.ParseException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.ProcessStatus;
import org.dspace.core.Context;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.scripts.Process;
import org.dspace.scripts.factory.ScriptServiceFactory;
import org.dspace.scripts.service.ProcessService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.utils.DSpace;
/**
* Script to clean up old processes in the specified state.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class ProcessCleaner extends DSpaceRunnable<ProcessCleanerConfiguration<ProcessCleaner>> {
private ConfigurationService configurationService;
private ProcessService processService;
private boolean cleanCompleted = false;
private boolean cleanFailed = false;
private boolean cleanRunning = false;
private boolean help = false;
private Integer days;
@Override
public void setup() throws ParseException {
this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
this.processService = ScriptServiceFactory.getInstance().getProcessService();
this.help = commandLine.hasOption('h');
this.cleanFailed = commandLine.hasOption('f');
this.cleanRunning = commandLine.hasOption('r');
this.cleanCompleted = commandLine.hasOption('c') || (!cleanFailed && !cleanRunning);
this.days = configurationService.getIntProperty("process-cleaner.days", 14);
if (this.days <= 0) {
throw new IllegalStateException("The number of days must be a positive integer.");
}
}
@Override
public void internalRun() throws Exception {
if (help) {
printHelp();
return;
}
Context context = new Context();
try {
context.turnOffAuthorisationSystem();
performDeletion(context);
} finally {
context.restoreAuthSystemState();
context.complete();
}
}
/**
* Delete the processes based on the specified statuses and the configured days
* from their creation.
*/
private void performDeletion(Context context) throws SQLException, IOException, AuthorizeException {
List<ProcessStatus> statuses = getProcessToDeleteStatuses();
Instant creationDate = calculateCreationDate();
handler.logInfo("Searching for processes with status: " + statuses);
List<Process> processes = processService.findByStatusAndCreationTimeOlderThan(context, statuses, creationDate);
handler.logInfo("Found " + processes.size() + " processes to be deleted");
for (Process process : processes) {
processService.delete(context, process);
}
handler.logInfo("Process cleanup completed");
}
/**
* Returns the list of Process statuses to be deleted.
*/
private List<ProcessStatus> getProcessToDeleteStatuses() {
List<ProcessStatus> statuses = new ArrayList<ProcessStatus>();
if (cleanCompleted) {
statuses.add(ProcessStatus.COMPLETED);
}
if (cleanFailed) {
statuses.add(ProcessStatus.FAILED);
}
if (cleanRunning) {
statuses.add(ProcessStatus.RUNNING);
}
return statuses;
}
private Instant calculateCreationDate() {
return Instant.now().minus(days, ChronoUnit.DAYS);
}
@Override
@SuppressWarnings("unchecked")
public ProcessCleanerConfiguration<ProcessCleaner> getScriptConfiguration() {
return new DSpace().getServiceManager()
.getServiceByName("process-cleaner", ProcessCleanerConfiguration.class);
}
}
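
Before anything is deleted, the script makes two decisions: which statuses are in scope (-c is the default when neither -f nor -r is given) and how old a process must be (process-cleaner.days, default 14). A standalone sketch of just that selection logic, with plain strings standing in for the ProcessStatus enum:

import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.List;

public class ProcessCleanerSketch {
    public static void main(String[] args) {
        boolean cleanFailed = true;      // as if -f were passed
        boolean cleanRunning = false;    // as if -r were absent
        // completed processes are cleaned either on -c or when no flag is given
        boolean cleanCompleted = !cleanFailed && !cleanRunning;

        List<String> statuses = new ArrayList<>();
        if (cleanCompleted) {
            statuses.add("COMPLETED");
        }
        if (cleanFailed) {
            statuses.add("FAILED");
        }
        if (cleanRunning) {
            statuses.add("RUNNING");
        }

        int days = 14;                   // the process-cleaner.days default
        Instant cutoff = Instant.now().minus(days, ChronoUnit.DAYS);
        System.out.println("Would delete " + statuses + " processes created before " + cutoff);
    }
}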

View File

@@ -1,18 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
/**
* The {@link ProcessCleaner} for CLI.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class ProcessCleanerCli extends ProcessCleaner {
}

View File

@@ -1,18 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
/**
* The {@link ProcessCleanerConfiguration} for CLI.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class ProcessCleanerCliConfiguration extends ProcessCleanerConfiguration<ProcessCleanerCli> {
}

View File

@@ -1,53 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
import org.apache.commons.cli.Options;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
* The {@link ScriptConfiguration} for the {@link ProcessCleaner} script.
*/
public class ProcessCleanerConfiguration<T extends ProcessCleaner> extends ScriptConfiguration<T> {
private Class<T> dspaceRunnableClass;
@Override
public Options getOptions() {
if (options == null) {
Options options = new Options();
options.addOption("h", "help", false, "help");
options.addOption("r", "running", false, "delete the process with RUNNING status");
options.getOption("r").setType(boolean.class);
options.addOption("f", "failed", false, "delete the process with FAILED status");
options.getOption("f").setType(boolean.class);
options.addOption("c", "completed", false,
"delete the process with COMPLETED status (default if no statuses are specified)");
options.getOption("c").setType(boolean.class);
super.options = options;
}
return options;
}
@Override
public Class<T> getDspaceRunnableClass() {
return dspaceRunnableClass;
}
@Override
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
this.dspaceRunnableClass = dspaceRunnableClass;
}
}
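
The Options built above are plain Apache Commons CLI options, so their behaviour can be checked outside the script framework. A small sketch parsing a made-up argument array against the same definitions; only the boolean flags matter here, so the setType calls are omitted:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;

public class OptionsParsingSketch {
    public static void main(String[] args) throws Exception {
        Options options = new Options();
        options.addOption("h", "help", false, "help");
        options.addOption("r", "running", false, "delete the process with RUNNING status");
        options.addOption("f", "failed", false, "delete the process with FAILED status");
        options.addOption("c", "completed", false, "delete the process with COMPLETED status");

        CommandLine line = new DefaultParser().parse(options, new String[] {"-f", "-r"});
        System.out.println("failed?    " + line.hasOption('f'));   // true
        System.out.println("running?   " + line.hasOption('r'));   // true
        System.out.println("completed? " + line.hasOption('c'));   // false
    }
}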

View File

@@ -12,40 +12,45 @@ package org.dspace.administer;
 *
 * An exception to report any problems with registry exports
 */
-public class RegistryExportException extends Exception {
+public class RegistryExportException extends Exception
+{
     /**
      * Create an empty authorize exception
      */
-    public RegistryExportException() {
+    public RegistryExportException()
+    {
         super();
     }

     /**
      * create an exception with only a message
      *
-     * @param message exception message
+     * @param message
      */
-    public RegistryExportException(String message) {
+    public RegistryExportException(String message)
+    {
         super(message);
     }

     /**
      * create an exception with an inner exception and a message
      *
-     * @param message exception message
-     * @param e reference to Throwable
+     * @param message
+     * @param e
      */
-    public RegistryExportException(String message, Throwable e) {
+    public RegistryExportException(String message, Throwable e)
+    {
         super(message, e);
     }

     /**
      * create an exception with an inner exception
      *
-     * @param e reference to Throwable
+     * @param e
      */
-    public RegistryExportException(Throwable e) {
+    public RegistryExportException(Throwable e)
+    {
         super(e);
     }
 }

View File

@@ -12,40 +12,45 @@ package org.dspace.administer;
 *
 * An exception to report any problems with registry imports
 */
-public class RegistryImportException extends Exception {
+public class RegistryImportException extends Exception
+{
     /**
      * Create an empty authorize exception
      */
-    public RegistryImportException() {
+    public RegistryImportException()
+    {
         super();
     }

     /**
      * create an exception with only a message
      *
-     * @param message error message
+     * @param message
      */
-    public RegistryImportException(String message) {
+    public RegistryImportException(String message)
+    {
         super(message);
     }

     /**
      * create an exception with an inner exception and a message
      *
-     * @param message error message
-     * @param e throwable
+     * @param message
+     * @param e
      */
-    public RegistryImportException(String message, Throwable e) {
+    public RegistryImportException(String message, Throwable e)
+    {
         super(message, e);
     }

     /**
      * create an exception with an inner exception
      *
-     * @param e throwable
+     * @param e
      */
-    public RegistryImportException(Throwable e) {
+    public RegistryImportException(Throwable e)
+    {
         super(e);
     }
 }

View File

@@ -9,18 +9,18 @@ package org.dspace.administer;

import java.io.File;
import java.io.IOException;

import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
-import javax.xml.xpath.XPath;
-import javax.xml.xpath.XPathConstants;
-import javax.xml.xpath.XPathExpressionException;
-import javax.xml.xpath.XPathFactory;

-import org.dspace.app.util.XMLUtils;
+import org.apache.xpath.XPathAPI;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;

/**
@@ -31,33 +31,27 @@ import org.xml.sax.SAXException;
 * I am the author, really I ripped these methods off from other
 * classes
 */
-public class RegistryImporter {
+public class RegistryImporter
+{
-    /**
-     * Default constructor
-     */
-    private RegistryImporter() { }
-
    /**
     * Load in the XML from file.
     *
-     * @param filename the filename to load from
+     * @param filename
+     *            the filename to load from
+     *
     * @return the DOM representation of the XML file
-     * @throws IOException if IO error
-     * @throws ParserConfigurationException if configuration parse error
-     * @throws SAXException if XML parse error
     */
    public static Document loadXML(String filename)
-        throws IOException, ParserConfigurationException, SAXException {
-        // This XML builder will *not* disable external entities as XML
-        // registries are considered trusted content
-        DocumentBuilder builder = XMLUtils.getTrustedDocumentBuilder();
+        throws IOException, ParserConfigurationException, SAXException
+    {
+        DocumentBuilder builder = DocumentBuilderFactory.newInstance()
+                .newDocumentBuilder();

        Document document = builder.parse(new File(filename));

        return document;
    }

    /**
     * Get the CDATA of a particular element. For example, if the XML document
     * contains:
@@ -69,19 +63,22 @@ public class RegistryImporter {
     * return <code>application/pdf</code>.
     * </P>
     * Why this isn't a core part of the XML API I do not know...
     *
-     * @param parentElement the element, whose child element you want the CDATA from
-     * @param childName the name of the element you want the CDATA from
+     * @param parentElement
+     *            the element, whose child element you want the CDATA from
+     * @param childName
+     *            the name of the element you want the CDATA from
+     *
     * @return the CDATA as a <code>String</code>
-     * @throws TransformerException if error
     */
    public static String getElementData(Node parentElement, String childName)
-        throws XPathExpressionException {
+        throws TransformerException
+    {
        // Grab the child node
-        XPath xPath = XPathFactory.newInstance().newXPath();
-        Node childNode = (Node) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODE);
+        Node childNode = XPathAPI.selectSingleNode(parentElement, childName);

-        if (childNode == null) {
+        if (childNode == null)
+        {
            // No child node, so no values
            return null;
        }
@@ -89,7 +86,8 @@ public class RegistryImporter {
        // Get the #text
        Node dataNode = childNode.getFirstChild();

-        if (dataNode == null) {
+        if (dataNode == null)
+        {
            return null;
        }
@@ -105,29 +103,32 @@ public class RegistryImporter {
     * <P>
     * <code>
     * &lt;foo&gt;
     * &lt;bar&gt;val1&lt;/bar&gt;
     * &lt;bar&gt;val2&lt;/bar&gt;
     * &lt;/foo&gt;
     * </code>
     * passing this the <code>foo</code> node and <code>bar</code> will
     * return <code>val1</code> and <code>val2</code>.
     * </P>
     * Why this also isn't a core part of the XML API I do not know...
     *
-     * @param parentElement the element, whose child element you want the CDATA from
-     * @param childName the name of the element you want the CDATA from
+     * @param parentElement
+     *            the element, whose child element you want the CDATA from
+     * @param childName
+     *            the name of the element you want the CDATA from
+     *
     * @return the CDATA as a <code>String</code>
-     * @throws TransformerException if error
     */
    public static String[] getRepeatedElementData(Node parentElement,
-            String childName) throws XPathExpressionException {
+            String childName) throws TransformerException
+    {
        // Grab the child node
-        XPath xPath = XPathFactory.newInstance().newXPath();
-        NodeList childNodes = (NodeList) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODESET);
+        NodeList childNodes = XPathAPI.selectNodeList(parentElement, childName);

        String[] data = new String[childNodes.getLength()];

-        for (int i = 0; i < childNodes.getLength(); i++) {
+        for (int i = 0; i < childNodes.getLength(); i++)
+        {
            // Get the #text node
            Node dataNode = childNodes.item(i).getFirstChild();
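
The two sides of this diff use different XPath APIs: Xalan's `org.apache.xpath.XPathAPI` on the 1.7.0 side, and the standard JAXP `javax.xml.xpath` API on the other. A minimal sketch of how the calls correspond, assuming an already-parsed `Document`:

import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

public class XPathMigrationSketch {
    // Xalan style (1.7.0 side of the diff):
    //   Node n = XPathAPI.selectSingleNode(doc, "foo/bar");
    //   NodeList l = XPathAPI.selectNodeList(doc, "foo/bar");
    // JAXP equivalents, matching the other side of the diff:
    static Node selectSingleNode(Document doc, String expr) throws XPathExpressionException {
        XPath xPath = XPathFactory.newInstance().newXPath();
        return (Node) xPath.compile(expr).evaluate(doc, XPathConstants.NODE);
    }

    static NodeList selectNodeList(Document doc, String expr) throws XPathExpressionException {
        XPath xPath = XPathFactory.newInstance().newXPath();
        return (NodeList) xPath.compile(expr).evaluate(doc, XPathConstants.NODESET);
    }
}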

View File

@@ -10,31 +10,21 @@ package org.dspace.administer;

import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Arrays;

import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
-import javax.xml.xpath.XPath;
-import javax.xml.xpath.XPathConstants;
-import javax.xml.xpath.XPathExpressionException;
-import javax.xml.xpath.XPathFactory;

-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.DefaultParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.logging.log4j.Logger;
-import org.dspace.app.util.XMLUtils;
+import org.apache.log4j.Logger;
+import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.BitstreamFormat;
-import org.dspace.content.factory.ContentServiceFactory;
-import org.dspace.content.service.BitstreamFormatService;
+import org.dspace.content.MetadataField;
+import org.dspace.content.MetadataSchema;
+import org.dspace.content.NonUniqueMetadataException;
import org.dspace.core.Context;
-import org.dspace.core.LogHelper;
+import org.dspace.core.LogManager;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
@@ -48,170 +38,123 @@ import org.xml.sax.SAXException;
 * <P>
 * <code>RegistryLoader -bitstream bitstream-formats.xml</code>
 * <P>
- * <code>RegistryLoader -metadata dc-types.xml</code>
+ * <code>RegistryLoader -dc dc-types.xml</code>
 *
 * @author Robert Tansley
 * @version $Revision$
 */
-public class RegistryLoader {
-    /**
-     * log4j category
-     */
-    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(RegistryLoader.class);
-
-    protected static BitstreamFormatService bitstreamFormatService = ContentServiceFactory.getInstance()
-            .getBitstreamFormatService();
-
-    /**
-     * Default constructor
-     */
-    private RegistryLoader() { }
+public class RegistryLoader
+{
+    /** log4j category */
+    private static Logger log = Logger.getLogger(RegistryLoader.class);

    /**
     * For invoking via the command line
     *
-     * @param argv the command line arguments given
-     * @throws Exception if error
+     * @param argv
+     *            command-line arguments
     */
-    public static void main(String[] argv) throws Exception {
-        // Set up command-line options and parse arguments
-        CommandLineParser parser = new DefaultParser();
-        Options options = createCommandLineOptions();
-        try {
-            CommandLine line = parser.parse(options, argv);
-
-            // Check if help option was entered or no options provided
-            if (line.hasOption('h') || line.getOptions().length == 0) {
-                printHelp(options);
-                System.exit(0);
-            }
-
-            Context context = new Context();
+    public static void main(String[] argv) throws Exception
+    {
+        String usage = "Usage: " + RegistryLoader.class.getName()
+                + " (-bitstream | -dc) registry-file.xml";
+
+        Context context = null;
+
+        try
+        {
+            context = new Context();

            // Can't update registries anonymously, so we need to turn off
            // authorisation
-            context.turnOffAuthorisationSystem();
-
-            try {
-                // Work out what we're loading
-                if (line.hasOption('b')) {
-                    String filename = line.getOptionValue('b');
-                    if (StringUtils.isEmpty(filename)) {
-                        System.err.println("No file path provided for bitstream format registry");
-                        printHelp(options);
-                        System.exit(1);
-                    }
-                    RegistryLoader.loadBitstreamFormats(context, filename);
-                } else if (line.hasOption('m')) {
-                    String filename = line.getOptionValue('m');
-                    if (StringUtils.isEmpty(filename)) {
-                        System.err.println("No file path provided for metadata registry");
-                        printHelp(options);
-                        System.exit(1);
-                    }
-                    // Call MetadataImporter, as it handles Metadata schema updates
-                    MetadataImporter.loadRegistry(filename, true);
-                } else {
-                    System.err.println("No registry type specified");
-                    printHelp(options);
-                    System.exit(1);
-                }
-
-                // Commit changes and close Context
-                context.complete();
-                System.exit(0);
-            } catch (Exception e) {
-                log.fatal(LogHelper.getHeader(context, "error_loading_registries", ""), e);
-                System.err.println("Error: \n - " + e.getMessage());
-                System.exit(1);
-            } finally {
-                // Clean up our context, if it still exists & it was never completed
-                if (context != null && context.isValid()) {
-                    context.abort();
-                }
-            }
-        } catch (ParseException e) {
-            System.err.println("Error parsing command-line arguments: " + e.getMessage());
-            printHelp(options);
+            context.setIgnoreAuthorization(true);
+
+            // Work out what we're loading
+            if (argv[0].equalsIgnoreCase("-bitstream"))
+            {
+                RegistryLoader.loadBitstreamFormats(context, argv[1]);
+            }
+            else if (argv[0].equalsIgnoreCase("-dc"))
+            {
+                loadDublinCoreTypes(context, argv[1]);
+            }
+            else
+            {
+                System.err.println(usage);
+            }
+
+            context.complete();
+            System.exit(0);
+        }
+        catch (ArrayIndexOutOfBoundsException ae)
+        {
+            System.err.println(usage);
+            if (context != null)
+            {
+                context.abort();
+            }
+            System.exit(1);
+        }
+        catch (Exception e)
+        {
+            log.fatal(LogManager.getHeader(context, "error_loading_registries",
+                    ""), e);
+            if (context != null)
+            {
+                context.abort();
+            }
+            System.err.println("Error: \n - " + e.getMessage());
            System.exit(1);
        }
    }

-    /**
-     * Create the command-line options
-     * @return the command-line options
-     */
-    private static Options createCommandLineOptions() {
-        Options options = new Options();
-        options.addOption("b", "bitstream", true, "load bitstream format registry from specified file");
-        options.addOption("m", "metadata", true, "load metadata registry from specified file");
-        options.addOption("h", "help", false, "print this help message");
-        return options;
-    }
-
-    /**
-     * Print the help message
-     * @param options the command-line options
-     */
-    private static void printHelp(Options options) {
-        HelpFormatter formatter = new HelpFormatter();
-        formatter.printHelp("RegistryLoader",
-                "Load bitstream format or metadata registries into the database\n",
-                options,
-                "\nExamples:\n" +
-                        " RegistryLoader -b bitstream-formats.xml\n" +
-                        " RegistryLoader -m dc-types.xml",
-                true);
-    }
-
    /**
     * Load Bitstream Format metadata
     *
-     * @param context DSpace context object
-     * @param filename the filename of the XML file to load
-     * @throws SQLException if database error
-     * @throws IOException if IO error
-     * @throws TransformerException if transformer error
-     * @throws ParserConfigurationException if config error
-     * @throws AuthorizeException if authorization error
-     * @throws SAXException if parser error
+     * @param context
+     *            DSpace context object
+     * @param filename
+     *            the filename of the XML file to load
     */
    public static void loadBitstreamFormats(Context context, String filename)
        throws SQLException, IOException, ParserConfigurationException,
-            SAXException, TransformerException, AuthorizeException, XPathExpressionException {
+            SAXException, TransformerException, AuthorizeException
+    {
        Document document = loadXML(filename);

        // Get the nodes corresponding to formats
-        XPath xPath = XPathFactory.newInstance().newXPath();
-        NodeList typeNodes = (NodeList) xPath.compile("dspace-bitstream-types/bitstream-type")
-                .evaluate(document, XPathConstants.NODESET);
+        NodeList typeNodes = XPathAPI.selectNodeList(document,
+                "dspace-bitstream-types/bitstream-type");

        // Add each one as a new format to the registry
-        for (int i = 0; i < typeNodes.getLength(); i++) {
+        for (int i = 0; i < typeNodes.getLength(); i++)
+        {
            Node n = typeNodes.item(i);
            loadFormat(context, n);
        }

-        log.info(LogHelper.getHeader(context, "load_bitstream_formats",
+        log.info(LogManager.getHeader(context, "load_bitstream_formats",
                "number_loaded=" + typeNodes.getLength()));
    }

    /**
     * Process a node in the bitstream format registry XML file. The node must
     * be a "bitstream-type" node
     *
-     * @param context DSpace context object
-     * @param node the node in the DOM tree
-     * @throws SQLException if database error
-     * @throws IOException if IO error
-     * @throws TransformerException if transformer error
-     * @throws AuthorizeException if authorization error
+     * @param context
+     *            DSpace context object
+     * @param node
+     *            the node in the DOM tree
     */
    private static void loadFormat(Context context, Node node)
-        throws SQLException, AuthorizeException, XPathExpressionException {
+        throws SQLException, IOException, TransformerException,
+            AuthorizeException
+    {
        // Get the values
        String mimeType = getElementData(node, "mimetype");
        String shortDesc = getElementData(node, "short_description");
@@ -225,50 +168,141 @@ public class RegistryLoader {
        String[] extensions = getRepeatedElementData(node, "extension");

-        // Check if this format already exists in our registry (by mime type)
-        BitstreamFormat exists = bitstreamFormatService.findByMIMEType(context, mimeType);
-
-        // If not found by mimeType, check by short description (since this must also be unique)
-        if (exists == null) {
-            exists = bitstreamFormatService.findByShortDescription(context, shortDesc);
-        }
-
-        // If it doesn't exist, create it..otherwise skip it.
-        if (exists == null) {
-            // Create the format object
-            BitstreamFormat format = bitstreamFormatService.create(context);
-
-            // Fill it out with the values
-            format.setMIMEType(mimeType);
-            bitstreamFormatService.setShortDescription(context, format, shortDesc);
-            format.setDescription(desc);
-            format.setSupportLevel(supportLevel);
-            format.setInternal(internal);
-            ArrayList<String> extensionList = new ArrayList<>();
-            extensionList.addAll(Arrays.asList(extensions));
-            format.setExtensions(extensionList);
-
-            // Write to database
-            bitstreamFormatService.update(context, format);
-        }
+        // Create the format object
+        BitstreamFormat format = BitstreamFormat.create(context);
+
+        // Fill it out with the values
+        format.setMIMEType(mimeType);
+        format.setShortDescription(shortDesc);
+        format.setDescription(desc);
+        format.setSupportLevel(supportLevel);
+        format.setInternal(internal);
+        format.setExtensions(extensions);
+
+        // Write to database
+        format.update();
    }

+    /**
+     * Load Dublin Core types
+     *
+     * @param context
+     *            DSpace context object
+     * @param filename
+     *            the filename of the XML file to load
+     * @throws NonUniqueMetadataException
+     */
+    public static void loadDublinCoreTypes(Context context, String filename)
+        throws SQLException, IOException, ParserConfigurationException,
+            SAXException, TransformerException, AuthorizeException,
+            NonUniqueMetadataException
+    {
+        Document document = loadXML(filename);
+
+        // Get the nodes corresponding to schemas
+        NodeList schemaNodes = XPathAPI.selectNodeList(document,
+                "/dspace-dc-types/dc-schema");
+
+        // Add each schema
+        for (int i = 0; i < schemaNodes.getLength(); i++)
+        {
+            Node n = schemaNodes.item(i);
+            loadMDSchema(context, n);
+        }
+
+        // Get the nodes corresponding to fields
+        NodeList typeNodes = XPathAPI.selectNodeList(document,
+                "/dspace-dc-types/dc-type");
+
+        // Add each one as a new field to the schema
+        for (int i = 0; i < typeNodes.getLength(); i++)
+        {
+            Node n = typeNodes.item(i);
+            loadDCType(context, n);
+        }
+
+        log.info(LogManager.getHeader(context, "load_dublin_core_types",
+                "number_loaded=" + typeNodes.getLength()));
+    }
+
+    /**
+     * Load Dublin Core Schemas
+     *
+     * @param context
+     * @param node
+     */
+    private static void loadMDSchema(Context context, Node node)
+        throws TransformerException, SQLException, AuthorizeException,
+            NonUniqueMetadataException
+    {
+        // Get the values
+        String shortname = getElementData(node, "name");
+        String namespace = getElementData(node, "namespace");
+
+        // Check if the schema exists already
+        MetadataSchema schema = MetadataSchema.find(context, shortname);
+        if (schema == null)
+        {
+            // If not create it.
+            schema = new MetadataSchema();
+            schema.setNamespace(namespace);
+            schema.setName(shortname);
+            schema.create(context);
+        }
+    }
+
+    /**
+     * Process a node in the bitstream format registry XML file. The node must
+     * be a "bitstream-type" node
+     *
+     * @param context
+     *            DSpace context object
+     * @param node
+     *            the node in the DOM tree
+     * @throws NonUniqueMetadataException
+     */
+    private static void loadDCType(Context context, Node node)
+        throws SQLException, IOException, TransformerException,
+            AuthorizeException, NonUniqueMetadataException
+    {
+        // Get the values
+        String schema = getElementData(node, "schema");
+        String element = getElementData(node, "element");
+        String qualifier = getElementData(node, "qualifier");
+        String scopeNote = getElementData(node, "scope_note");
+
+        // If the schema is not provided default to DC
+        if (schema == null)
+        {
+            schema = MetadataSchema.DC_SCHEMA;
+        }
+
+        // Find the matching schema object
+        MetadataSchema schemaObj = MetadataSchema.find(context, schema);
+
+        MetadataField field = new MetadataField();
+        field.setSchemaID(schemaObj.getSchemaID());
+        field.setElement(element);
+        field.setQualifier(qualifier);
+        field.setScopeNote(scopeNote);
+        field.create(context);
+    }
+
    // ===================== XML Utility Methods =========================

    /**
     * Load in the XML from file.
     *
-     * @param filename the filename to load from
+     * @param filename
+     *            the filename to load from
+     *
     * @return the DOM representation of the XML file
-     * @throws IOException if IO error
-     * @throws ParserConfigurationException if config error
-     * @throws SAXException if parser error
     */
    private static Document loadXML(String filename) throws IOException,
-        ParserConfigurationException, SAXException {
-        // This XML builder will *not* disable external entities as XML
-        // registries are considered trusted content
-        DocumentBuilder builder = XMLUtils.getTrustedDocumentBuilder();
+        ParserConfigurationException, SAXException
+    {
+        DocumentBuilder builder = DocumentBuilderFactory.newInstance()
+                .newDocumentBuilder();

        return builder.parse(new File(filename));
    }
@@ -278,25 +312,28 @@ public class RegistryLoader {
     * contains:
     * <P>
     * <code>
-     * <foo><mimetype>application/pdf</mimetype></foo>
+     * &lt;foo&gt;&lt;mimetype&gt;application/pdf&lt;/mimetype&gt;&lt;/foo&gt;
     * </code>
     * passing this the <code>foo</code> node and <code>mimetype</code> will
     * return <code>application/pdf</code>.
     * </P>
     * Why this isn't a core part of the XML API I do not know...
     *
-     * @param parentElement the element, whose child element you want the CDATA from
-     * @param childName the name of the element you want the CDATA from
+     * @param parentElement
+     *            the element, whose child element you want the CDATA from
+     * @param childName
+     *            the name of the element you want the CDATA from
+     *
     * @return the CDATA as a <code>String</code>
-     * @throws TransformerException if transformer error
     */
    private static String getElementData(Node parentElement, String childName)
-        throws XPathExpressionException {
+        throws TransformerException
+    {
        // Grab the child node
-        XPath xPath = XPathFactory.newInstance().newXPath();
-        Node childNode = (Node) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODE);
+        Node childNode = XPathAPI.selectSingleNode(parentElement, childName);

-        if (childNode == null) {
+        if (childNode == null)
+        {
            // No child node, so no values
            return null;
        }
@@ -304,7 +341,8 @@ public class RegistryLoader {
        // Get the #text
        Node dataNode = childNode.getFirstChild();

-        if (dataNode == null) {
+        if (dataNode == null)
+        {
            return null;
        }
@@ -319,30 +357,33 @@ public class RegistryLoader {
     * document contains:
     * <P>
     * <code>
-     * <foo>
-     * <bar>val1</bar>
-     * <bar>val2</bar>
-     * </foo>
+     * &lt;foo&gt;
+     * &lt;bar&gt;val1&lt;/bar&gt;
+     * &lt;bar&gt;val2&lt;/bar&gt;
+     * &lt;/foo&gt;
     * </code>
     * passing this the <code>foo</code> node and <code>bar</code> will
     * return <code>val1</code> and <code>val2</code>.
     * </P>
     * Why this also isn't a core part of the XML API I do not know...
     *
-     * @param parentElement the element, whose child element you want the CDATA from
-     * @param childName the name of the element you want the CDATA from
+     * @param parentElement
+     *            the element, whose child element you want the CDATA from
+     * @param childName
+     *            the name of the element you want the CDATA from
+     *
     * @return the CDATA as a <code>String</code>
-     * @throws TransformerException if transformer error
     */
    private static String[] getRepeatedElementData(Node parentElement,
-            String childName) throws XPathExpressionException {
+            String childName) throws TransformerException
+    {
        // Grab the child node
-        XPath xPath = XPathFactory.newInstance().newXPath();
-        NodeList childNodes = (NodeList) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODESET);
+        NodeList childNodes = XPathAPI.selectNodeList(parentElement, childName);

        String[] data = new String[childNodes.getLength()];

-        for (int i = 0; i < childNodes.getLength(); i++) {
+        for (int i = 0; i < childNodes.getLength(); i++)
+        {
            // Get the #text node
            Node dataNode = childNodes.item(i).getFirstChild();
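
The newer side of this diff drives RegistryLoader through Apache Commons CLI rather than positional arguments. A standalone sketch of that parsing pattern (the option letters come from the diff; the class name and output are illustrative):

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class CliSketch {
    public static void main(String[] args) {
        Options options = new Options();
        options.addOption("b", "bitstream", true, "load bitstream format registry from specified file");
        options.addOption("m", "metadata", true, "load metadata registry from specified file");
        options.addOption("h", "help", false, "print this help message");
        try {
            CommandLine line = new DefaultParser().parse(options, args);
            // Fall back to the help screen when asked, or when no option is given
            if (line.hasOption('h') || line.getOptions().length == 0) {
                new HelpFormatter().printHelp("RegistryLoader", options, true);
                return;
            }
            if (line.hasOption('b')) {
                System.out.println("bitstream registry file: " + line.getOptionValue('b'));
            } else if (line.hasOption('m')) {
                System.out.println("metadata registry file: " + line.getOptionValue('m'));
            }
        } catch (ParseException e) {
            System.err.println("Error parsing command-line arguments: " + e.getMessage());
        }
    }
}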

View File

@@ -0,0 +1,97 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
import org.dspace.content.DCDate;
import org.dspace.core.Context;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
/**
* A command-line tool for performing necessary tweaks in the database for the
* new last_modified column in the item table.
*
* @author Robert Tansley
* @version $Revision$
*/
public class Upgrade101To11
{
/**
* For invoking via the command line
*
* @param argv
* command-line arguments
*/
public static void main(String[] argv)
{
Context context = null;
try
{
context = new Context();
// Deal with withdrawn items first.
// last_modified takes the value of the deletion date
TableRowIterator tri = DatabaseManager.queryTable(context, "item",
"SELECT * FROM item WHERE withdrawal_date IS NOT NULL");
while (tri.hasNext())
{
TableRow row = tri.next();
DCDate d = new DCDate(row.getStringColumn("withdrawal_date"));
row.setColumn("last_modified", d.toDate());
DatabaseManager.update(context, row);
}
tri.close();
// Next, update those items with a date.available
tri = DatabaseManager.query(context,
"SELECT item.item_id, dcvalue.text_value FROM item, dctyperegistry, "+
"dcvalue WHERE item.item_id=dcvalue.item_id AND dcvalue.dc_type_id="+
"dctyperegistry.dc_type_id AND dctyperegistry.element LIKE 'date' "+
"AND dctyperegistry.qualifier LIKE 'available'");
while (tri.hasNext())
{
TableRow resultRow = tri.next();
DCDate d = new DCDate(resultRow.getStringColumn("text_value"));
// Can't update the row, have to do a separate query
TableRow itemRow = DatabaseManager.find(context, "item",
resultRow.getIntColumn("item_id"));
itemRow.setColumn("last_modified", d.toDate());
DatabaseManager.update(context, itemRow);
}
tri.close();
// Finally, for all items that have no date.available or withdrawal
// date, set the update time to now!
DatabaseManager.updateQuery(context,
"UPDATE item SET last_modified=now() WHERE last_modified IS NULL");
context.complete();
System.out.println("Last modified dates set");
System.exit(0);
}
catch (Exception e)
{
System.err.println("Exception occurred:" + e);
e.printStackTrace();
if (context != null)
{
context.abort();
}
System.exit(1);
}
}
}
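
Upgrade101To11 relies on the pre-Hibernate DatabaseManager/TableRow API. The recurring query-mutate-update cycle, shown in isolation (a sketch against that legacy 1.x API; it needs a live DSpace database context):

import org.dspace.core.Context;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;

public class TableRowPatternSketch {
    // Iterate matching rows, mutate a column, write each row back.
    static void touchWithdrawnItems(Context c) throws Exception {
        TableRowIterator tri = DatabaseManager.queryTable(c, "item",
                "SELECT * FROM item WHERE withdrawal_date IS NOT NULL");
        while (tri.hasNext()) {
            TableRow row = tri.next();
            row.setColumn("last_modified", new java.util.Date());
            DatabaseManager.update(c, row);
        }
        tri.close();
    }
}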

View File

@@ -0,0 +1,198 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.ItemIterator;
import org.dspace.core.Context;
/**
* Command-line tool for making changes to DSpace database when updating from
* version 1.1/1.1.1 to 1.2.
* <P>
* The changes are:
* <ul>
* <li>Setting owning collection field for items
* <li>Reorganising content bitstreams into one bundle named ORIGINAL, license
* bitstreams into a bundle named LICENSE
* <li>Setting the sequence_id numbers in the bitstream table. This happens as
* item.update() is called on every item.
* <li>If a (newly-reorganised) 'ORIGINAL' bundle contains a text/html
* bitstream, that bitstream is set to the primary bitstream for HTML support.
* </ul>
*/
public class Upgrade11To12
{
public static void main(String[] argv) throws Exception
{
Context c = new Context();
// we are superuser!
c.setIgnoreAuthorization(true);
ItemIterator ii = null;
// first set owning Collections
Collection[] collections = Collection.findAll(c);
System.out.println("Setting item owningCollection fields in database");
for (int q = 0; q < collections.length; q++)
{
ii = collections[q].getItems();
while (ii.hasNext())
{
Item myItem = ii.next();
// set it if it's not already set
if (myItem.getOwningCollection() == null)
{
myItem.setOwningCollection(collections[q]);
myItem.update();
System.out.println("Set owner of item " + myItem.getID()
+ " to collection " + collections[q].getID());
}
}
}
// commit pending transactions before continuing
c.commit();
// now combine some bundles
ii = Item.findAll(c);
while (ii.hasNext())
{
boolean skipItem = false;
Item myItem = ii.next();
int licenseBundleIndex = -1; // array index of license bundle (we'll
// skip this one often)
int primaryBundleIndex = -1; // array index of our primary bundle
// (all bitstreams assemble here)
System.out.println("Processing item #: " + myItem.getID());
Bundle[] myBundles = myItem.getBundles();
// look for bundles with multiple bitstreams
// (if any found, we'll skip this item)
for (int i = 0; i < myBundles.length; i++)
{
// skip if bundle is already named
if (myBundles[i].getName() != null)
{
System.out
.println("Skipping this item - named bundles already found");
skipItem = true;
break;
}
Bitstream[] bitstreams = myBundles[i].getBitstreams();
// skip this item if we already have bundles combined in this
// item
if (bitstreams.length > 1)
{
System.out
.println("Skipping this item - compound bundles already found");
skipItem = true;
break;
}
// is this the license? check the format
BitstreamFormat bf = bitstreams[0].getFormat();
if ("License".equals(bf.getShortDescription()))
{
System.out.println("Found license!");
if (licenseBundleIndex == -1)
{
licenseBundleIndex = i;
System.out.println("License bundle set to: " + i);
}
else
{
System.out
.println("ERROR - multiple license bundles in item - skipping");
skipItem = true;
break;
}
}
else
{
// not a license, if primary isn't set yet, set it
if (primaryBundleIndex == -1)
{
primaryBundleIndex = i;
System.out.println("Primary bundle set to: " + i);
}
}
}
if (!skipItem)
{
// name the primary and license bundles
if (primaryBundleIndex != -1)
{
myBundles[primaryBundleIndex].setName("ORIGINAL");
myBundles[primaryBundleIndex].update();
}
if (licenseBundleIndex != -1)
{
myBundles[licenseBundleIndex].setName("LICENSE");
myBundles[licenseBundleIndex].update();
}
for (int i = 0; i < myBundles.length; i++)
{
Bitstream[] bitstreams = myBundles[i].getBitstreams();
// now we can safely assume no bundles with multiple
// bitstreams
if (bitstreams.length > 0 && (i != primaryBundleIndex) && (i != licenseBundleIndex))
{
// only option left is a bitstream to be combined
// with primary bundle
// and remove now-redundant bundle
myBundles[primaryBundleIndex]
.addBitstream(bitstreams[0]); // add to
// primary
myItem.removeBundle(myBundles[i]); // remove this
// bundle
System.out.println("Bitstream from bundle " + i
+ " moved to primary bundle");
// flag if HTML bitstream
if (bitstreams[0].getFormat().getMIMEType().equals(
"text/html"))
{
System.out
.println("Set primary bitstream to HTML file in item #"
+ myItem.getID()
+ " for HTML support.");
}
}
}
}
}
c.complete();
}
}
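
The HTML branch above only prints a message; recording the bitstream as the bundle's primary bitstream is the write that the message implies. A hedged sketch of that step against the same 1.x API (the method name setPrimaryBitstreamID is assumed from that era's Bundle class; verify against your source tree):

import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;

public class PrimaryBitstreamSketch {
    // Mark an HTML bitstream as the primary bitstream of its bundle.
    static void flagHtmlPrimary(Bundle bundle, Bitstream bitstream) throws Exception {
        if ("text/html".equals(bitstream.getFormat().getMIMEType())) {
            bundle.setPrimaryBitstreamID(bitstream.getID()); // assumed 1.x API
            bundle.update();
        }
    }
}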

View File

@@ -1,54 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.alerts;
/**
* Enum representing the options for allowing sessions:
* ALLOW_ALL_SESSIONS - Will allow all users to log in and continue their sessions
* ALLOW_CURRENT_SESSIONS_ONLY - Will prevent non admin users from logging in, however logged-in users
* will remain logged in
* ALLOW_ADMIN_SESSIONS_ONLY - Only admin users can log in, non admin sessions will be interrupted
*
* NOTE: This functionality can be stored in the database, but no support is present right now to interrupt and prevent
* sessions.
*/
public enum AllowSessionsEnum {
ALLOW_ALL_SESSIONS("all"),
ALLOW_CURRENT_SESSIONS_ONLY("current"),
ALLOW_ADMIN_SESSIONS_ONLY("admin");
private String allowSessionsType;
AllowSessionsEnum(String allowSessionsType) {
this.allowSessionsType = allowSessionsType;
}
public String getValue() {
return allowSessionsType;
}
public static AllowSessionsEnum fromString(String alertAllowSessionType) {
if (alertAllowSessionType == null) {
return AllowSessionsEnum.ALLOW_ALL_SESSIONS;
}
switch (alertAllowSessionType) {
case "all":
return AllowSessionsEnum.ALLOW_ALL_SESSIONS;
case "current":
return AllowSessionsEnum.ALLOW_CURRENT_SESSIONS_ONLY;
case "admin" :
return AllowSessionsEnum.ALLOW_ADMIN_SESSIONS_ONLY;
default:
throw new IllegalArgumentException("No corresponding enum value for provided string: "
+ alertAllowSessionType);
}
}
}
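
A minimal round-trip sketch (class name illustrative) of how the string stored in the database maps to and from the enum:

public class AllowSessionsExample {
    public static void main(String[] args) {
        // String -> enum, as read from the database column
        AllowSessionsEnum allow = AllowSessionsEnum.fromString("current");
        // enum -> String, as written back to the database
        String stored = allow.getValue(); // "current"
        // null falls back to ALLOW_ALL_SESSIONS; unknown strings throw
        AllowSessionsEnum fallback = AllowSessionsEnum.fromString(null);
        System.out.println(allow + " / " + stored + " / " + fallback);
    }
}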

View File

@@ -1,176 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.alerts;
import java.time.ZonedDateTime;
import jakarta.persistence.Cacheable;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.dspace.core.ReloadableEntity;
import org.hibernate.annotations.CacheConcurrencyStrategy;
/**
* Database object representing system-wide alerts
*/
@Entity
@Cacheable
@org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, include = "non-lazy")
@Table(name = "systemwidealert")
public class SystemWideAlert implements ReloadableEntity<Integer> {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "alert_id_seq")
@SequenceGenerator(name = "alert_id_seq", sequenceName = "alert_id_seq", allocationSize = 1)
@Column(name = "alert_id", unique = true, nullable = false)
private Integer alertId;
@Column(name = "message", nullable = false)
private String message;
@Column(name = "allow_sessions")
private String allowSessions;
@Column(name = "countdown_to")
private ZonedDateTime countdownTo;
@Column(name = "active")
private boolean active;
protected SystemWideAlert() {
}
/**
* This method returns the ID that the system-wide alert holds within the database
*
* @return The ID that the system-wide alert holds within the database
*/
@Override
public Integer getID() {
return alertId;
}
/**
* Set the ID for the system-wide alert
*
* @param alertID The ID to set
*/
public void setID(final Integer alertID) {
this.alertId = alertID;
}
/**
* Retrieve the message of the system-wide alert
*
* @return the message of the system-wide alert
*/
public String getMessage() {
return message;
}
/**
* Set the message of the system-wide alert
*
* @param message The message to set
*/
public void setMessage(final String message) {
this.message = message;
}
/**
* Retrieve what kind of sessions are allowed while the system-wide alert is active
*
* @return what kind of sessions are allowed while the system-wide alert is active
*/
public AllowSessionsEnum getAllowSessions() {
return AllowSessionsEnum.fromString(allowSessions);
}
/**
* Set what kind of sessions are allowed while the system-wide alert is active
*
* @param allowSessions Integer representing what kind of sessions are allowed
*/
public void setAllowSessions(AllowSessionsEnum allowSessions) {
this.allowSessions = allowSessions.getValue();
}
/**
* Retrieve the date to which the countdown runs while the system-wide alert is active
*
* @return the date to which the countdown runs while the system-wide alert is active
*/
public ZonedDateTime getCountdownTo() {
return countdownTo;
}
/**
* Set the date to which the countdown runs while the system-wide alert is active
*
* @param countdownTo The date to which the countdown runs
*/
public void setCountdownTo(final ZonedDateTime countdownTo) {
this.countdownTo = countdownTo;
}
/**
* Retrieve whether the system-wide alert is active
*
* @return whether the system-wide alert is active
*/
public boolean isActive() {
return active;
}
/**
* Set whether the system-wide alert is active
*
* @param active Whether the system-wide alert is active
*/
public void setActive(final boolean active) {
this.active = active;
}
/**
* Return <code>true</code> if <code>other</code> is the same SystemWideAlert
* as this object, <code>false</code> otherwise
*
* @param other object to compare to
* @return <code>true</code> if object passed in represents the same
* system-wide alert as this object
*/
@Override
public boolean equals(Object other) {
return (other instanceof SystemWideAlert &&
new EqualsBuilder().append(this.getID(), ((SystemWideAlert) other).getID())
.append(this.getMessage(), ((SystemWideAlert) other).getMessage())
.append(this.getAllowSessions(), ((SystemWideAlert) other).getAllowSessions())
.append(this.getCountdownTo(), ((SystemWideAlert) other).getCountdownTo())
.append(this.isActive(), ((SystemWideAlert) other).isActive())
.isEquals());
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.append(this.getID())
.append(this.getMessage())
.append(this.getAllowSessions())
.append(this.getCountdownTo())
.append(this.isActive())
.toHashCode();
}
}

View File

@@ -1,129 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.alerts;
import java.io.IOException;
import java.sql.SQLException;
import java.time.ZonedDateTime;
import java.util.List;
import org.apache.logging.log4j.Logger;
import org.dspace.alerts.dao.SystemWideAlertDAO;
import org.dspace.alerts.service.SystemWideAlertService;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.core.LogHelper;
import org.dspace.eperson.EPerson;
import org.springframework.beans.factory.annotation.Autowired;
/**
* The implementation for the {@link SystemWideAlertService} class
*/
public class SystemWideAlertServiceImpl implements SystemWideAlertService {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SystemWideAlertService.class);
@Autowired
private SystemWideAlertDAO systemWideAlertDAO;
@Autowired
private AuthorizeService authorizeService;
@Override
public SystemWideAlert create(final Context context, final String message,
final AllowSessionsEnum allowSessionsType,
final ZonedDateTime countdownTo, final boolean active) throws SQLException,
AuthorizeException {
if (!authorizeService.isAdmin(context)) {
throw new AuthorizeException(
"Only administrators can create a system-wide alert");
}
SystemWideAlert systemWideAlert = new SystemWideAlert();
systemWideAlert.setMessage(message);
systemWideAlert.setAllowSessions(allowSessionsType);
systemWideAlert.setCountdownTo(countdownTo);
systemWideAlert.setActive(active);
SystemWideAlert createdAlert = systemWideAlertDAO.create(context, systemWideAlert);
log.info(LogHelper.getHeader(context, "system_wide_alert_create",
"System Wide Alert has been created with message: '" + message + "' and ID "
+ createdAlert.getID() + " and allowSessionsType " + allowSessionsType +
" and active set to " + active));
return createdAlert;
}
@Override
public SystemWideAlert find(final Context context, final int alertId) throws SQLException {
return systemWideAlertDAO.findByID(context, SystemWideAlert.class, alertId);
}
@Override
public List<SystemWideAlert> findAll(final Context context) throws SQLException {
return systemWideAlertDAO.findAll(context, SystemWideAlert.class);
}
@Override
public List<SystemWideAlert> findAll(final Context context, final int limit, final int offset) throws SQLException {
return systemWideAlertDAO.findAll(context, limit, offset);
}
@Override
public List<SystemWideAlert> findAllActive(final Context context, final int limit, final int offset)
throws SQLException {
return systemWideAlertDAO.findAllActive(context, limit, offset);
}
@Override
public void delete(final Context context, final SystemWideAlert systemWideAlert)
throws SQLException, IOException, AuthorizeException {
if (!authorizeService.isAdmin(context)) {
throw new AuthorizeException(
"Only administrators can delete a system-wide alert");
}
systemWideAlertDAO.delete(context, systemWideAlert);
log.info(LogHelper.getHeader(context, "system_wide_alert_delete",
"System Wide Alert with ID " + systemWideAlert.getID() + " has been deleted"));
}
@Override
public void update(final Context context, final SystemWideAlert systemWideAlert)
throws SQLException, AuthorizeException {
if (!authorizeService.isAdmin(context)) {
throw new AuthorizeException(
"Only administrators can update a system-wide alert");
}
systemWideAlertDAO.save(context, systemWideAlert);
}
@Override
public boolean canNonAdminUserLogin(Context context) throws SQLException {
List<SystemWideAlert> active = findAllActive(context, 1, 0);
if (active == null || active.isEmpty()) {
return true;
}
return active.get(0).getAllowSessions() == AllowSessionsEnum.ALLOW_ALL_SESSIONS;
}
@Override
public boolean canUserMaintainSession(Context context, EPerson ePerson) throws SQLException {
if (authorizeService.isAdmin(context, ePerson)) {
return true;
}
List<SystemWideAlert> active = findAllActive(context, 1, 0);
if (active == null || active.isEmpty()) {
return true;
}
return active.get(0).getAllowSessions() != AllowSessionsEnum.ALLOW_ADMIN_SESSIONS_ONLY;
}
}
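
Creating and activating an alert goes through the service, and requires an administrator context. A minimal sketch (the context and the injected service instance are assumed; the message and timings are illustrative):

import java.sql.SQLException;
import java.time.ZonedDateTime;

import org.dspace.alerts.AllowSessionsEnum;
import org.dspace.alerts.SystemWideAlert;
import org.dspace.alerts.service.SystemWideAlertService;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context;

public class AlertUsageSketch {
    // Assumes `context` is authenticated as an administrator and
    // `alertService` has been injected (e.g. via Spring).
    static SystemWideAlert announceMaintenance(Context context, SystemWideAlertService alertService)
            throws SQLException, AuthorizeException {
        return alertService.create(context,
                "Maintenance starts soon; new logins are disabled.",
                AllowSessionsEnum.ALLOW_CURRENT_SESSIONS_ONLY,
                ZonedDateTime.now().plusHours(2), // countdown target shown to users
                true);                            // activate immediately
    }
}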

View File

@@ -1,45 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.alerts.dao;
import java.sql.SQLException;
import java.util.List;
import org.dspace.alerts.SystemWideAlert;
import org.dspace.core.Context;
import org.dspace.core.GenericDAO;
/**
* This is the Data Access Object for the {@link SystemWideAlert} object
*/
public interface SystemWideAlertDAO extends GenericDAO<SystemWideAlert> {
/**
* Returns a list of all SystemWideAlert objects in the database
*
* @param context The relevant DSpace context
* @param limit The limit for the amount of SystemWideAlerts returned
* @param offset The offset for the Processes to be returned
* @return The list of all SystemWideAlert objects in the Database
* @throws SQLException If something goes wrong
*/
List<SystemWideAlert> findAll(Context context, int limit, int offset) throws SQLException;
/**
* Returns a list of all active SystemWideAlert objects in the database
*
* @param context The relevant DSpace context
* @param limit The limit for the amount of SystemWideAlerts returned
* @param offset The offset for the Processes to be returned
* @return The list of all SystemWideAlert objects in the Database
* @throws SQLException If something goes wrong
*/
List<SystemWideAlert> findAllActive(Context context, int limit, int offset) throws SQLException;
}

View File

@@ -1,48 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.alerts.dao.impl;
import java.sql.SQLException;
import java.util.List;
import jakarta.persistence.criteria.CriteriaBuilder;
import jakarta.persistence.criteria.CriteriaQuery;
import jakarta.persistence.criteria.Root;
import org.dspace.alerts.SystemWideAlert;
import org.dspace.alerts.SystemWideAlert_;
import org.dspace.alerts.dao.SystemWideAlertDAO;
import org.dspace.core.AbstractHibernateDAO;
import org.dspace.core.Context;
/**
* Implementation class for the {@link SystemWideAlertDAO}
*/
public class SystemWideAlertDAOImpl extends AbstractHibernateDAO<SystemWideAlert> implements SystemWideAlertDAO {
public List<SystemWideAlert> findAll(final Context context, final int limit, final int offset) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, SystemWideAlert.class);
Root<SystemWideAlert> alertRoot = criteriaQuery.from(SystemWideAlert.class);
criteriaQuery.select(alertRoot);
return list(context, criteriaQuery, false, SystemWideAlert.class, limit, offset);
}
public List<SystemWideAlert> findAllActive(final Context context, final int limit, final int offset)
throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, SystemWideAlert.class);
Root<SystemWideAlert> alertRoot = criteriaQuery.from(SystemWideAlert.class);
criteriaQuery.select(alertRoot);
criteriaQuery.where(criteriaBuilder.equal(alertRoot.get(SystemWideAlert_.active), true));
return list(context, criteriaQuery, false, SystemWideAlert.class, limit, offset);
}
}

View File

@@ -1,118 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.alerts.service;
import java.io.IOException;
import java.sql.SQLException;
import java.time.ZonedDateTime;
import java.util.List;
import org.dspace.alerts.AllowSessionsEnum;
import org.dspace.alerts.SystemWideAlert;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
/**
* An interface for the SystemWideAlertService with methods regarding the SystemWideAlert workload
*/
public interface SystemWideAlertService {
/**
* This method will create a SystemWideAlert object in the database
*
* @param context The relevant DSpace context
* @param message The message of the system-wide alert
* @param allowSessionsType Which sessions need to be allowed for the system-wide alert
* @param countdownTo The date to which to count down while the system-wide alert is active
* @param active Whether the system-wide alert is active
* @return The created SystemWideAlert object
* @throws SQLException If something goes wrong
*/
SystemWideAlert create(Context context, String message, AllowSessionsEnum allowSessionsType,
ZonedDateTime countdownTo, boolean active
) throws SQLException, AuthorizeException;
/**
* This method will retrieve a SystemWideAlert object from the Database with the given ID
*
* @param context The relevant DSpace context
* @param alertId The alert id on which we'll search for in the database
* @return The system-wide alert that holds the given alert id
* @throws SQLException If something goes wrong
*/
SystemWideAlert find(Context context, int alertId) throws SQLException;
/**
* Returns a list of all SystemWideAlert objects in the database
*
* @param context The relevant DSpace context
* @return The list of all SystemWideAlert objects in the Database
* @throws SQLException If something goes wrong
*/
List<SystemWideAlert> findAll(Context context) throws SQLException;
/**
* Returns a list of all SystemWideAlert objects in the database
*
* @param context The relevant DSpace context
* @param limit The limit for the amount of system-wide alerts returned
* @param offset The offset for the system-wide alerts to be returned
* @return The list of all SystemWideAlert objects in the Database
* @throws SQLException If something goes wrong
*/
List<SystemWideAlert> findAll(Context context, int limit, int offset) throws SQLException;
/**
* Returns a list of all active SystemWideAlert objects in the database
*
* @param context The relevant DSpace context
* @return The list of all active SystemWideAlert objects in the database
* @throws SQLException If something goes wrong
*/
List<SystemWideAlert> findAllActive(Context context, int limit, int offset) throws SQLException;
/**
* This method will delete the given SystemWideAlert object from the database
*
* @param context The relevant DSpace context
* @param systemWideAlert The SystemWideAlert object to be deleted
* @throws SQLException If something goes wrong
*/
void delete(Context context, SystemWideAlert systemWideAlert)
throws SQLException, IOException, AuthorizeException;
/**
* This method will be used to update the given SystemWideAlert object in the database
*
* @param context The relevant DSpace context
* @param systemWideAlert The SystemWideAlert object to be updated
* @throws SQLException If something goes wrong
*/
void update(Context context, SystemWideAlert systemWideAlert) throws SQLException, AuthorizeException;
/**
* Verifies if the user connected to the current context can retain its session
*
* @param context The relevant DSpace context
* @return if the user connected to the current context can retain its session
*/
boolean canUserMaintainSession(Context context, EPerson ePerson) throws SQLException;
/**
* Verifies if a non admin user can log in
*
* @param context The relevant DSpace context
* @return if a non admin user can log in
*/
boolean canNonAdminUserLogin(Context context) throws SQLException;
}

View File

@@ -1,689 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkaccesscontrol;
import static org.apache.commons.collections4.CollectionUtils.isEmpty;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import static org.dspace.authorize.ResourcePolicy.TYPE_CUSTOM;
import static org.dspace.authorize.ResourcePolicy.TYPE_INHERITED;
import static org.dspace.core.Constants.CONTENT_BUNDLE_NAME;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.time.LocalDate;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.TimeZone;
import java.util.UUID;
import java.util.function.Function;
import java.util.stream.Collectors;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.bulkaccesscontrol.exception.BulkAccessControlException;
import org.dspace.app.bulkaccesscontrol.model.AccessCondition;
import org.dspace.app.bulkaccesscontrol.model.AccessConditionBitstream;
import org.dspace.app.bulkaccesscontrol.model.AccessConditionItem;
import org.dspace.app.bulkaccesscontrol.model.BulkAccessConditionConfiguration;
import org.dspace.app.bulkaccesscontrol.model.BulkAccessControlInput;
import org.dspace.app.bulkaccesscontrol.service.BulkAccessConditionConfigurationService;
import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory;
import org.dspace.app.mediafilter.service.MediaFilterService;
import org.dspace.app.util.DSpaceObjectUtilsImpl;
import org.dspace.app.util.service.DSpaceObjectUtils;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.Bitstream;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.indexobject.IndexableItem;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.submit.model.AccessConditionOption;
import org.dspace.utils.DSpace;
/**
* Implementation of {@link DSpaceRunnable} to perform a bulk access control via json file.
*
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
*
*/
public class BulkAccessControl extends DSpaceRunnable<BulkAccessControlScriptConfiguration<BulkAccessControl>> {
private DSpaceObjectUtils dSpaceObjectUtils;
private SearchService searchService;
private ItemService itemService;
private String filename;
private List<String> uuids;
private Context context;
private BulkAccessConditionConfigurationService bulkAccessConditionConfigurationService;
private ResourcePolicyService resourcePolicyService;
protected EPersonService epersonService;
private ConfigurationService configurationService;
private MediaFilterService mediaFilterService;
private Map<String, AccessConditionOption> itemAccessConditions;
private Map<String, AccessConditionOption> uploadAccessConditions;
private final String ADD_MODE = "add";
private final String REPLACE_MODE = "replace";
private boolean help = false;
protected String eperson = null;
@Override
@SuppressWarnings("unchecked")
public void setup() throws ParseException {
this.searchService = SearchUtils.getSearchService();
this.itemService = ContentServiceFactory.getInstance().getItemService();
this.resourcePolicyService = AuthorizeServiceFactory.getInstance().getResourcePolicyService();
this.epersonService = EPersonServiceFactory.getInstance().getEPersonService();
this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
mediaFilterService = MediaFilterServiceFactory.getInstance().getMediaFilterService();
mediaFilterService.setLogHandler(handler);
this.bulkAccessConditionConfigurationService = new DSpace().getServiceManager().getServiceByName(
"bulkAccessConditionConfigurationService", BulkAccessConditionConfigurationService.class);
this.dSpaceObjectUtils = new DSpace().getServiceManager().getServiceByName(
DSpaceObjectUtilsImpl.class.getName(), DSpaceObjectUtilsImpl.class);
BulkAccessConditionConfiguration bulkAccessConditionConfiguration =
bulkAccessConditionConfigurationService.getBulkAccessConditionConfiguration("default");
itemAccessConditions = bulkAccessConditionConfiguration
.getItemAccessConditionOptions()
.stream()
.collect(Collectors.toMap(AccessConditionOption::getName, Function.identity()));
uploadAccessConditions = bulkAccessConditionConfiguration
.getBitstreamAccessConditionOptions()
.stream()
.collect(Collectors.toMap(AccessConditionOption::getName, Function.identity()));
help = commandLine.hasOption('h');
filename = commandLine.getOptionValue('f');
uuids = commandLine.hasOption('u') ? Arrays.asList(commandLine.getOptionValues('u')) : null;
}
@Override
public void internalRun() throws Exception {
if (help) {
printHelp();
return;
}
ObjectMapper mapper = new ObjectMapper();
mapper.setTimeZone(TimeZone.getTimeZone(ZoneOffset.UTC));
BulkAccessControlInput accessControl;
context = new Context(Context.Mode.BATCH_EDIT);
setEPerson(context);
if (!isAuthorized(context)) {
handler.logError("Current user is not eligible to execute script bulk-access-control");
throw new AuthorizeException("Current user is not eligible to execute script bulk-access-control");
}
if (uuids == null || uuids.size() == 0) {
handler.logError("A target uuid must be provided with at least one uuid (run with -h flag for details)");
throw new IllegalArgumentException("At least one target uuid must be provided");
}
InputStream inputStream = handler.getFileStream(context, filename)
.orElseThrow(() -> new IllegalArgumentException("Error reading file, the file couldn't be "
+ "found for filename: " + filename));
try {
accessControl = mapper.readValue(inputStream, BulkAccessControlInput.class);
} catch (IOException e) {
handler.logError("Error parsing json file " + e.getMessage());
throw new IllegalArgumentException("Error parsing json file", e);
}
try {
validate(accessControl);
updateItemsAndBitstreamsPolices(accessControl);
context.complete();
} catch (Exception e) {
handler.handleException(e);
context.abort();
}
}
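    // Illustrative only (not part of the original class): the stream parsed
    // above is mapped by Jackson into BulkAccessControlInput. Given the
    // validation rules below, a minimal "add" request could look like this
    // (field names inferred from the getters used in this class):
    //
    //   {
    //     "item": {
    //       "mode": "add",
    //       "accessConditions": [
    //         { "name": "embargo", "startDate": "2024-01-01" }
    //       ]
    //     }
    //   }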
/**
* check the validation of mapped json data, it must
* provide item or bitstream information or both of them
* and check the validation of item node if provided,
* and check the validation of bitstream node if provided.
*
* @param accessControl mapped json data
* @throws SQLException if something goes wrong in the database
* @throws BulkAccessControlException if accessControl is invalid
*/
private void validate(BulkAccessControlInput accessControl) throws SQLException {
AccessConditionItem item = accessControl.getItem();
AccessConditionBitstream bitstream = accessControl.getBitstream();
if (Objects.isNull(item) && Objects.isNull(bitstream)) {
handler.logError("item or bitstream node must be provided");
throw new BulkAccessControlException("item or bitstream node must be provided");
}
if (Objects.nonNull(item)) {
validateItemNode(item);
}
if (Objects.nonNull(bitstream)) {
validateBitstreamNode(bitstream);
}
}
/**
* Validate the item node. The item mode must be provided with value
* 'add' or 'replace'; when the mode is 'add', accessConditions must
* also be provided. Each access condition is then validated.
*
* @param item the item node
* @throws BulkAccessControlException if item node is invalid
*/
private void validateItemNode(AccessConditionItem item) {
String mode = item.getMode();
List<AccessCondition> accessConditions = item.getAccessConditions();
if (StringUtils.isEmpty(mode)) {
handler.logError("item mode node must be provided");
throw new BulkAccessControlException("item mode node must be provided");
} else if (!(StringUtils.equalsAny(mode, ADD_MODE, REPLACE_MODE))) {
handler.logError("wrong value for item mode<" + mode + ">");
throw new BulkAccessControlException("wrong value for item mode<" + mode + ">");
} else if (ADD_MODE.equals(mode) && isEmpty(accessConditions)) {
handler.logError("accessConditions of item must be provided with mode<" + ADD_MODE + ">");
throw new BulkAccessControlException(
"accessConditions of item must be provided with mode<" + ADD_MODE + ">");
}
for (AccessCondition accessCondition : accessConditions) {
validateAccessCondition(accessCondition);
}
}
/**
* Validate the bitstream node. The bitstream mode must be provided
* with value 'add' or 'replace'; when the mode is 'add',
* accessConditions must also be provided. The constraint information
* and each access condition are then validated.
*
* @param bitstream the bitstream node
* @throws SQLException if something goes wrong in the database
* @throws BulkAccessControlException if bitstream node is invalid
*/
private void validateBitstreamNode(AccessConditionBitstream bitstream) throws SQLException {
String mode = bitstream.getMode();
List<AccessCondition> accessConditions = bitstream.getAccessConditions();
if (StringUtils.isEmpty(mode)) {
handler.logError("bitstream mode node must be provided");
throw new BulkAccessControlException("bitstream mode node must be provided");
} else if (!(StringUtils.equalsAny(mode, ADD_MODE, REPLACE_MODE))) {
handler.logError("wrong value for bitstream mode<" + mode + ">");
throw new BulkAccessControlException("wrong value for bitstream mode<" + mode + ">");
} else if (ADD_MODE.equals(mode) && isEmpty(accessConditions)) {
handler.logError("accessConditions of bitstream must be provided with mode<" + ADD_MODE + ">");
throw new BulkAccessControlException(
"accessConditions of bitstream must be provided with mode<" + ADD_MODE + ">");
}
validateConstraint(bitstream);
for (AccessCondition accessCondition : bitstream.getAccessConditions()) {
validateAccessCondition(accessCondition);
}
}
/**
* Validate the constraint node if provided. Constraints are not
* supported when multiple uuids are provided, or when the uuid does
* not identify an Item.
*
* @param bitstream the bitstream node
* @throws SQLException if something goes wrong in the database
* @throws BulkAccessControlException if constraint node is invalid
*/
private void validateConstraint(AccessConditionBitstream bitstream) throws SQLException {
if (uuids.size() > 1 && containsConstraints(bitstream)) {
handler.logError("constraint isn't supported when multiple uuids are provided");
throw new BulkAccessControlException("constraint isn't supported when multiple uuids are provided");
} else if (uuids.size() == 1 && containsConstraints(bitstream)) {
DSpaceObject dso =
dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(uuids.get(0)));
if (Objects.nonNull(dso) && dso.getType() != Constants.ITEM) {
handler.logError("constraint is not supported when uuid isn't an Item");
throw new BulkAccessControlException("constraint is not supported when uuid isn't an Item");
}
}
}
/**
* Validate the access condition: its name must match one of the
* configured access conditions. {@link AccessConditionOption#validateResourcePolicy(
* Context, String, LocalDate, LocalDate)} is then called; if that
* throws an exception, the access condition is invalid.
*
* @param accessCondition the accessCondition
* @throws BulkAccessControlException if the accessCondition is invalid
*/
private void validateAccessCondition(AccessCondition accessCondition) {
if (!itemAccessConditions.containsKey(accessCondition.getName())) {
handler.logError("wrong access condition <" + accessCondition.getName() + ">");
throw new BulkAccessControlException("wrong access condition <" + accessCondition.getName() + ">");
}
try {
itemAccessConditions.get(accessCondition.getName()).validateResourcePolicy(
context, accessCondition.getName(), accessCondition.getStartDate(), accessCondition.getEndDate());
} catch (Exception e) {
handler.logError("invalid access condition, " + e.getMessage());
handler.handleException(e);
}
}
/**
* Find all items matching the provided {@link #uuids} in Solr, then
* update the resource policies of those items, of their bitstreams
* (ORIGINAL bundles only, plus derivative bitstreams), or both.
*
* @param accessControl the access control input
* @throws SQLException if something goes wrong in the database
* @throws SearchServiceException if a search error occurs
* @throws AuthorizeException if an authorization error occurs
*/
private void updateItemsAndBitstreamsPolicies(BulkAccessControlInput accessControl)
throws SQLException, SearchServiceException, AuthorizeException {
int counter = 0;
int start = 0;
int limit = 20;
String query = buildSolrQuery(uuids);
Iterator<Item> itemIterator = findItems(query, start, limit);
while (itemIterator.hasNext()) {
Item item = context.reloadEntity(itemIterator.next());
if (Objects.nonNull(accessControl.getItem())) {
updateItemPolicies(item, accessControl);
}
if (Objects.nonNull(accessControl.getBitstream())) {
updateBitstreamsPolicies(item, accessControl);
}
context.commit();
context.uncacheEntity(item);
counter++;
if (counter == limit) {
counter = 0;
start += limit;
itemIterator = findItems(query, start, limit);
}
}
}
private String buildSolrQuery(List<String> uuids) throws SQLException {
String[] query = new String[uuids.size()];
for (int i = 0; i < query.length; i++) {
DSpaceObject dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(uuids.get(i)));
if (dso.getType() == Constants.COMMUNITY) {
query[i] = "location.comm:" + dso.getID();
} else if (dso.getType() == Constants.COLLECTION) {
query[i] = "location.coll:" + dso.getID();
} else if (dso.getType() == Constants.ITEM) {
query[i] = "search.resourceid:" + dso.getID();
}
}
return StringUtils.joinWith(" OR ", query);
}
private Iterator<Item> findItems(String query, int start, int limit)
throws SearchServiceException {
DiscoverQuery discoverQuery = buildDiscoveryQuery(query, start, limit);
return searchService.search(context, discoverQuery)
.getIndexableObjects()
.stream()
.map(indexableObject ->
((IndexableItem) indexableObject).getIndexedObject())
.collect(Collectors.toList())
.iterator();
}
private DiscoverQuery buildDiscoveryQuery(String query, int start, int limit) {
DiscoverQuery discoverQuery = new DiscoverQuery();
discoverQuery.setDSpaceObjectFilter(IndexableItem.TYPE);
discoverQuery.setQuery(query);
discoverQuery.setStart(start);
discoverQuery.setMaxResults(limit);
discoverQuery.setSortField("search.resourceid", DiscoverQuery.SORT_ORDER.asc);
return discoverQuery;
}
/**
* Update the item's resource policies. When the mode is 'replace',
* all current resource policies of types 'TYPE_CUSTOM' and
* 'TYPE_INHERITED' are removed first; the new resource policies are
* then set.
*
* @param item the item
* @param accessControl the access control input
* @throws SQLException if something goes wrong in the database
* @throws AuthorizeException if an authorization error occurs
*/
private void updateItemPolicies(Item item, BulkAccessControlInput accessControl)
throws SQLException, AuthorizeException {
AccessConditionItem acItem = accessControl.getItem();
if (REPLACE_MODE.equals(acItem.getMode())) {
removeReadPolicies(item, TYPE_CUSTOM);
removeReadPolicies(item, TYPE_INHERITED);
}
setItemPolicies(item, accessControl);
logInfo(acItem.getAccessConditions(), acItem.getMode(), item);
}
/**
* Create the new resource policies of the item, then call
* {@link ItemService#adjustItemPolicies(
* Context, Item, Collection)} to adjust the item's default policies.
*
* @param item the item
* @param accessControl the access control input
* @throws SQLException if something goes wrong in the database
* @throws AuthorizeException if an authorization error occurs
*/
private void setItemPolicies(Item item, BulkAccessControlInput accessControl)
throws SQLException, AuthorizeException {
accessControl
.getItem()
.getAccessConditions()
.forEach(accessCondition -> createResourcePolicy(item, accessCondition,
itemAccessConditions.get(accessCondition.getName())));
itemService.adjustItemPolicies(context, item, item.getOwningCollection(), false);
}
/**
* Update the resource policies of all of the item's bitstreams, or
* only of the bitstreams specified in the constraint node, plus
* their derivative bitstreams.
*
* <strong>NOTE:</strong> only bitstreams of ORIGINAL bundles
*
* @param item the item that contains the bitstreams
* @param accessControl the access control input
*/
private void updateBitstreamsPolicies(Item item, BulkAccessControlInput accessControl) {
AccessConditionBitstream.Constraint constraints = accessControl.getBitstream().getConstraints();
// look over all the bundles and force initialization of bitstreams collection
// to avoid lazy initialization exception
long count = item.getBundles()
.stream()
.flatMap(bundle ->
bundle.getBitstreams().stream())
.count();
item.getBundles(CONTENT_BUNDLE_NAME).stream()
.flatMap(bundle -> bundle.getBitstreams().stream())
.filter(bitstream -> constraints == null ||
constraints.getUuid() == null ||
constraints.getUuid().size() == 0 ||
constraints.getUuid().contains(bitstream.getID().toString()))
.forEach(bitstream -> updateBitstreamPolicies(bitstream, item, accessControl));
}
/**
* Check that the bitstream node exists, contains a constraint node,
* and that the constraint contains uuids.
*
* @param bitstream the bitstream node
* @return true when the constraint's uuids are not empty,
* otherwise false
*/
private boolean containsConstraints(AccessConditionBitstream bitstream) {
return Objects.nonNull(bitstream) &&
Objects.nonNull(bitstream.getConstraints()) &&
isNotEmpty(bitstream.getConstraints().getUuid());
}
/**
* Update the bitstream's resource policies. When the mode is
* 'replace', all current resource policies of types 'TYPE_CUSTOM'
* and 'TYPE_INHERITED' are removed first; the new resource policies
* are then set.
*
* @param bitstream the bitstream
* @param item the item the bitstream belongs to
* @param accessControl the access control input
* @throws RuntimeException if something goes wrong in the database
* or an authorization error occurs
*/
private void updateBitstreamPolicies(Bitstream bitstream, Item item, BulkAccessControlInput accessControl) {
AccessConditionBitstream acBitstream = accessControl.getBitstream();
if (REPLACE_MODE.equals(acBitstream.getMode())) {
removeReadPolicies(bitstream, TYPE_CUSTOM);
removeReadPolicies(bitstream, TYPE_INHERITED);
}
try {
setBitstreamPolicies(bitstream, item, accessControl);
logInfo(acBitstream.getAccessConditions(), acBitstream.getMode(), bitstream);
} catch (SQLException | AuthorizeException e) {
throw new RuntimeException(e);
}
}
/**
* Remove the DSpace object's READ policies of the given type.
*
* @param dso the dspace object
* @param type resource policy type
* @throws BulkAccessControlException if something goes wrong
* in the database or an authorization error occurs
*/
private void removeReadPolicies(DSpaceObject dso, String type) {
try {
resourcePolicyService.removePolicies(context, dso, type, Constants.READ);
} catch (SQLException | AuthorizeException e) {
throw new BulkAccessControlException(e);
}
}
/**
* Create the new resource policies of the bitstream, then call
* {@link ItemService#adjustBitstreamPolicies} to adjust the
* bitstream's default policies, and also update the resource
* policies of its derivative bitstreams.
*
* @param bitstream the bitstream
* @param item the item the bitstream belongs to
* @param accessControl the access control input
* @throws SQLException if something goes wrong in the database
* @throws AuthorizeException if an authorization error occurs
*/
private void setBitstreamPolicies(Bitstream bitstream, Item item, BulkAccessControlInput accessControl)
throws SQLException, AuthorizeException {
accessControl.getBitstream()
.getAccessConditions()
.forEach(accessCondition -> createResourcePolicy(bitstream, accessCondition,
uploadAccessConditions.get(accessCondition.getName())));
itemService.adjustBitstreamPolicies(context, item, item.getOwningCollection(), bitstream);
mediaFilterService.updatePoliciesOfDerivativeBitstreams(context, item, bitstream);
}
/**
* Create the resource policy from the information carried by the
* access condition.
*
* @param obj the dspace object
* @param accessCondition the access condition
* @param accessConditionOption the access condition option
* @throws BulkAccessControlException if an exception occurs
*/
private void createResourcePolicy(DSpaceObject obj, AccessCondition accessCondition,
AccessConditionOption accessConditionOption) {
String name = accessCondition.getName();
String description = accessCondition.getDescription();
LocalDate startDate = accessCondition.getStartDate();
LocalDate endDate = accessCondition.getEndDate();
try {
accessConditionOption.createResourcePolicy(context, obj, name, description, startDate, endDate);
} catch (Exception e) {
throw new BulkAccessControlException(e);
}
}
/**
* Set the eperson in the context
*
* @param context the context
* @throws SQLException if a database error occurs
*/
protected void setEPerson(Context context) throws SQLException {
EPerson myEPerson = epersonService.find(context, this.getEpersonIdentifier());
if (myEPerson == null) {
handler.logError("EPerson cannot be found: " + this.getEpersonIdentifier());
throw new UnsupportedOperationException("EPerson cannot be found: " + this.getEpersonIdentifier());
}
context.setCurrentUser(myEPerson);
}
private void logInfo(List<AccessCondition> accessConditions, String mode, DSpaceObject dso) {
String type = dso.getClass().getSimpleName();
if (REPLACE_MODE.equals(mode) && isEmpty(accessConditions)) {
handler.logInfo("Cleaning " + type + " {" + dso.getID() + "} policies");
handler.logInfo("Inheriting policies from owning Collection in " + type + " {" + dso.getID() + "}");
return;
}
StringBuilder message = new StringBuilder();
message.append(mode.equals(ADD_MODE) ? "Adding " : "Replacing ")
.append(type)
.append(" {")
.append(dso.getID())
.append("} policy")
.append(mode.equals(ADD_MODE) ? " with " : " to ")
.append("access conditions:");
appendAccessConditionsInfo(message, accessConditions);
handler.logInfo(message.toString());
if (REPLACE_MODE.equals(mode) && isAppendModeEnabled()) {
handler.logInfo("Inheriting policies from owning Collection in " + type + " {" + dso.getID() + "}");
}
}
private void appendAccessConditionsInfo(StringBuilder message, List<AccessCondition> accessConditions) {
DateTimeFormatter dateFormat = DateTimeFormatter.ISO_LOCAL_DATE;
message.append("{");
for (int i = 0; i < accessConditions.size(); i++) {
message.append(accessConditions.get(i).getName());
Optional.ofNullable(accessConditions.get(i).getStartDate())
.ifPresent(date -> message.append(", start_date=" + dateFormat.format(date)));
Optional.ofNullable(accessConditions.get(i).getEndDate())
.ifPresent(date -> message.append(", end_date=" + dateFormat.format(date)));
if (i != accessConditions.size() - 1) {
message.append(", ");
}
}
message.append("}");
}
private boolean isAppendModeEnabled() {
return configurationService.getBooleanProperty("core.authorization.installitem.inheritance-read.append-mode");
}
protected boolean isAuthorized(Context context) {
return true;
}
@Override
@SuppressWarnings("unchecked")
public BulkAccessControlScriptConfiguration<BulkAccessControl> getScriptConfiguration() {
return new DSpace().getServiceManager()
.getServiceByName("bulk-access-control", BulkAccessControlScriptConfiguration.class);
}
}
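
For illustration, a minimal sketch of building the same input programmatically that internalRun parses from JSON, using the model classes shown further below. The condition names ("openaccess", "embargo") come from the documented JSON example and must match access conditions configured on the instance; for embargo, the documented example sets a startDate (the lift date).

import java.time.LocalDate;
import java.util.List;

import org.dspace.app.bulkaccesscontrol.model.AccessCondition;
import org.dspace.app.bulkaccesscontrol.model.AccessConditionItem;
import org.dspace.app.bulkaccesscontrol.model.BulkAccessControlInput;

public class BulkAccessControlInputExamples {

    // Replace all custom/inherited item policies with open access.
    public static BulkAccessControlInput openAccessReplace() {
        AccessConditionItem item = new AccessConditionItem();
        item.setMode("replace");
        item.setAccessConditions(List.of(
            new AccessCondition("openaccess", null, null, null)));
        return new BulkAccessControlInput(item, null);
    }

    // Add an embargo on top of the existing item policies.
    public static BulkAccessControlInput embargoAdd(LocalDate liftDate) {
        AccessConditionItem item = new AccessConditionItem();
        item.setMode("add");
        // the documented JSON example gives embargo a startDate (the lift date)
        item.setAccessConditions(List.of(
            new AccessCondition("embargo", null, liftDate, null)));
        return new BulkAccessControlInput(item, null);
    }
}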

View File

@@ -1,66 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkaccesscontrol;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.UUID;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.scripts.DSpaceCommandLineParameter;
/**
* Extension of {@link BulkAccessControl} for CLI.
*
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
*
*/
public class BulkAccessControlCli extends BulkAccessControl {
@Override
protected void setEPerson(Context context) throws SQLException {
EPerson myEPerson;
eperson = commandLine.getOptionValue('e');
if (eperson == null) {
handler.logError("An eperson to do the the Bulk Access Control must be specified " +
"(run with -h flag for details)");
throw new UnsupportedOperationException("An eperson to do the Bulk Access Control must be specified");
}
if (StringUtils.contains(eperson, '@')) {
myEPerson = epersonService.findByEmail(context, eperson);
} else {
myEPerson = epersonService.find(context, UUID.fromString(eperson));
}
if (myEPerson == null) {
handler.logError("EPerson cannot be found: " + eperson + " (run with -h flag for details)");
throw new UnsupportedOperationException("EPerson cannot be found: " + eperson);
}
context.setCurrentUser(myEPerson);
}
@Override
protected boolean isAuthorized(Context context) {
if (context.getCurrentUser() == null) {
return false;
}
return getScriptConfiguration().isAllowedToExecute(context,
Arrays.stream(commandLine.getOptions())
.map(option ->
new DSpaceCommandLineParameter("-" + option.getOpt(), option.getValue()))
.collect(Collectors.toList()));
}
}

View File

@@ -1,42 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkaccesscontrol;
import java.io.InputStream;
import org.apache.commons.cli.Options;
/**
* Extension of {@link BulkAccessControlScriptConfiguration} for CLI.
*
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
*
*/
public class BulkAccessControlCliScriptConfiguration<T extends BulkAccessControlCli>
extends BulkAccessControlScriptConfiguration<T> {
@Override
public Options getOptions() {
Options options = new Options();
options.addOption("u", "uuid", true, "target uuids of communities/collections/items");
options.getOption("u").setType(String.class);
options.getOption("u").setRequired(true);
options.addOption("f", "file", true, "source json file");
options.getOption("f").setType(InputStream.class);
options.getOption("f").setRequired(true);
options.addOption("e", "eperson", true, "email of EPerson used to perform actions");
options.getOption("e").setRequired(true);
options.addOption("h", "help", false, "help");
return options;
}
}

View File

@@ -1,110 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkaccesscontrol;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.List;
import java.util.Objects;
import java.util.UUID;
import java.util.stream.Collectors;
import org.apache.commons.cli.Options;
import org.dspace.app.util.DSpaceObjectUtilsImpl;
import org.dspace.app.util.service.DSpaceObjectUtils;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.dspace.scripts.DSpaceCommandLineParameter;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.dspace.utils.DSpace;
/**
* Script configuration for {@link BulkAccessControl}.
*
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
*
* @param <T> the {@link BulkAccessControl} type
*/
public class BulkAccessControlScriptConfiguration<T extends BulkAccessControl> extends ScriptConfiguration<T> {
private Class<T> dspaceRunnableClass;
@Override
public boolean isAllowedToExecute(Context context, List<DSpaceCommandLineParameter> commandLineParameters) {
try {
if (Objects.isNull(commandLineParameters)) {
return authorizeService.isAdmin(context) || authorizeService.isComColAdmin(context)
|| authorizeService.isItemAdmin(context);
} else {
List<String> dspaceObjectIDs =
commandLineParameters.stream()
.filter(parameter -> "-u".equals(parameter.getName()))
.map(DSpaceCommandLineParameter::getValue)
.collect(Collectors.toList());
DSpaceObjectUtils dSpaceObjectUtils = new DSpace().getServiceManager().getServiceByName(
DSpaceObjectUtilsImpl.class.getName(), DSpaceObjectUtilsImpl.class);
for (String dspaceObjectID : dspaceObjectIDs) {
DSpaceObject dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(dspaceObjectID));
if (Objects.isNull(dso)) {
throw new IllegalArgumentException();
}
if (!authorizeService.isAdmin(context, dso)) {
return false;
}
}
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
return true;
}
@Override
public Options getOptions() {
if (options == null) {
Options options = new Options();
options.addOption("u", "uuid", true, "target uuids of communities/collections/items");
options.getOption("u").setType(String.class);
options.getOption("u").setRequired(true);
options.addOption("f", "file", true, "source json file");
options.getOption("f").setType(InputStream.class);
options.getOption("f").setRequired(true);
options.addOption("h", "help", false, "help");
super.options = options;
}
return options;
}
@Override
public Class<T> getDspaceRunnableClass() {
return dspaceRunnableClass;
}
/**
* Generic setter for the dspaceRunnableClass
*
* @param dspaceRunnableClass The dspaceRunnableClass to be set on this
* BulkAccessControlScriptConfiguration
*/
@Override
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
this.dspaceRunnableClass = dspaceRunnableClass;
}
}

View File

@@ -1,48 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkaccesscontrol.exception;
/**
* Exception for errors that occur during bulk access control
*
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
*
*/
public class BulkAccessControlException extends RuntimeException {
private static final long serialVersionUID = -74730626862418515L;
/**
* Constructor with error message and cause.
*
* @param message the error message
* @param cause the error cause
*/
public BulkAccessControlException(String message, Throwable cause) {
super(message, cause);
}
/**
* Constructor with error message.
*
* @param message the error message
*/
public BulkAccessControlException(String message) {
super(message);
}
/**
* Constructor with error cause.
*
* @param cause the error cause
*/
public BulkAccessControlException(Throwable cause) {
super(cause);
}
}

View File

@@ -1,59 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkaccesscontrol.model;
import java.time.LocalDate;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import org.dspace.app.bulkaccesscontrol.BulkAccessControl;
import org.dspace.util.MultiFormatDateDeserializer;
/**
* Class that models the values of an Access Condition as expressed in the {@link BulkAccessControl} input file
*
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
*/
public class AccessCondition {
private String name;
private String description;
@JsonDeserialize(using = MultiFormatDateDeserializer.class)
private LocalDate startDate;
@JsonDeserialize(using = MultiFormatDateDeserializer.class)
private LocalDate endDate;
public AccessCondition() {
}
public AccessCondition(String name, String description, LocalDate startDate, LocalDate endDate) {
this.name = name;
this.description = description;
this.startDate = startDate;
this.endDate = endDate;
}
public String getName() {
return name;
}
public String getDescription() {
return description;
}
public LocalDate getStartDate() {
return startDate;
}
public LocalDate getEndDate() {
return endDate;
}
}

View File

@@ -1,69 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkaccesscontrol.model;
import java.util.ArrayList;
import java.util.List;
import org.dspace.app.bulkaccesscontrol.BulkAccessControl;
/**
* Class that models the bitstream node of the
* {@link BulkAccessControl} JSON input file
*
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
*/
public class AccessConditionBitstream {
private String mode;
private Constraint constraints;
private List<AccessCondition> accessConditions;
public String getMode() {
return mode;
}
public void setMode(String mode) {
this.mode = mode;
}
public Constraint getConstraints() {
return constraints;
}
public void setConstraints(Constraint constraints) {
this.constraints = constraints;
}
public List<AccessCondition> getAccessConditions() {
if (accessConditions == null) {
return new ArrayList<>();
}
return accessConditions;
}
public void setAccessConditions(List<AccessCondition> accessConditions) {
this.accessConditions = accessConditions;
}
public class Constraint {
private List<String> uuid;
public List<String> getUuid() {
return uuid;
}
public void setUuid(List<String> uuid) {
this.uuid = uuid;
}
}
}

View File

@@ -1,45 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkaccesscontrol.model;
import java.util.ArrayList;
import java.util.List;
import org.dspace.app.bulkaccesscontrol.BulkAccessControl;
/**
* Class that models the item node of the
* {@link BulkAccessControl} JSON input file
*
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
*/
public class AccessConditionItem {
String mode;
List<AccessCondition> accessConditions;
public String getMode() {
return mode;
}
public void setMode(String mode) {
this.mode = mode;
}
public List<AccessCondition> getAccessConditions() {
if (accessConditions == null) {
return new ArrayList<>();
}
return accessConditions;
}
public void setAccessConditions(List<AccessCondition> accessConditions) {
this.accessConditions = accessConditions;
}
}

View File

@@ -1,50 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkaccesscontrol.model;
import java.util.List;
import org.dspace.submit.model.AccessConditionOption;
/**
* A collection of access condition options to apply during a bulk access control operation.
*
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
*/
public class BulkAccessConditionConfiguration {
private String name;
private List<AccessConditionOption> itemAccessConditionOptions;
private List<AccessConditionOption> bitstreamAccessConditionOptions;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<AccessConditionOption> getItemAccessConditionOptions() {
return itemAccessConditionOptions;
}
public void setItemAccessConditionOptions(
List<AccessConditionOption> itemAccessConditionOptions) {
this.itemAccessConditionOptions = itemAccessConditionOptions;
}
public List<AccessConditionOption> getBitstreamAccessConditionOptions() {
return bitstreamAccessConditionOptions;
}
public void setBitstreamAccessConditionOptions(
List<AccessConditionOption> bitstreamAccessConditionOptions) {
this.bitstreamAccessConditionOptions = bitstreamAccessConditionOptions;
}
}

View File

@@ -1,72 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkaccesscontrol.model;
import org.dspace.app.bulkaccesscontrol.BulkAccessControl;
/**
* Class that models the content of the JSON file used as input for the {@link BulkAccessControl}
*
* <code> <br/>
* { <br/>
* item: { <br/>
* mode: "replace", <br/>
* accessConditions: [ <br/>
* { <br/>
* "name": "openaccess" <br/>
* } <br/>
* ] <br/>
* }, <br/>
* bitstream: { <br/>
* constraints: { <br/>
* uuid: [bit-uuid1, bit-uuid2, ..., bit-uuidN], <br/>
* }, <br/>
* mode: "add", <br/>
* accessConditions: [ <br/>
* { <br/>
* "name": "embargo", <br/>
* "startDate": "2024-06-24T23:59:59.999+0000" <br/>
* } <br/>
* ] <br/>
* } <br/>
* }
* </code>
*
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
*/
public class BulkAccessControlInput {
AccessConditionItem item;
AccessConditionBitstream bitstream;
public BulkAccessControlInput() {
}
public BulkAccessControlInput(AccessConditionItem item,
AccessConditionBitstream bitstream) {
this.item = item;
this.bitstream = bitstream;
}
public AccessConditionItem getItem() {
return item;
}
public void setItem(AccessConditionItem item) {
this.item = item;
}
public AccessConditionBitstream getBitstream() {
return bitstream;
}
public void setBitstream(AccessConditionBitstream bitstream) {
this.bitstream = bitstream;
}
}
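
A short sketch of how the documented JSON shape maps onto this class when parsed with Jackson, as BulkAccessControl#internalRun does with the uploaded file; the field values are illustrative.

import com.fasterxml.jackson.databind.ObjectMapper;

import org.dspace.app.bulkaccesscontrol.model.BulkAccessControlInput;

public class BulkAccessControlInputParseExample {
    public static void main(String[] args) throws Exception {
        String json = "{ \"item\": { \"mode\": \"replace\","
            + " \"accessConditions\": [ { \"name\": \"openaccess\" } ] } }";
        BulkAccessControlInput input =
            new ObjectMapper().readValue(json, BulkAccessControlInput.class);
        System.out.println(input.getItem().getMode());             // replace
        System.out.println(input.getItem().getAccessConditions()   // openaccess
            .get(0).getName());
    }
}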

View File

@@ -1,45 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkaccesscontrol.service;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.collections4.CollectionUtils;
import org.dspace.app.bulkaccesscontrol.model.BulkAccessConditionConfiguration;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Simple bean to manage different Bulk Access Condition configurations
*
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
*/
public class BulkAccessConditionConfigurationService {
@Autowired
private List<BulkAccessConditionConfiguration> bulkAccessConditionConfigurations;
public List<BulkAccessConditionConfiguration> getBulkAccessConditionConfigurations() {
if (CollectionUtils.isEmpty(bulkAccessConditionConfigurations)) {
return new ArrayList<>();
}
return bulkAccessConditionConfigurations;
}
public BulkAccessConditionConfiguration getBulkAccessConditionConfiguration(String name) {
return getBulkAccessConditionConfigurations().stream()
.filter(x -> name.equals(x.getName()))
.findFirst()
.orElse(null);
}
public void setBulkAccessConditionConfigurations(
List<BulkAccessConditionConfiguration> bulkAccessConditionConfigurations) {
this.bulkAccessConditionConfigurations = bulkAccessConditionConfigurations;
}
}
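
As a hedged sketch, the name-keyed maps used by BulkAccessControl (itemAccessConditions, uploadAccessConditions) might be derived from one of these configurations roughly as follows; this assumes AccessConditionOption exposes a getName() accessor, which is not shown in this document.

import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

import org.dspace.app.bulkaccesscontrol.model.BulkAccessConditionConfiguration;
import org.dspace.submit.model.AccessConditionOption;

public class AccessConditionMaps {
    // Key the configured item access condition options by their name,
    // so lookups like itemAccessConditions.get(name) become possible.
    public static Map<String, AccessConditionOption> itemOptionsByName(
            BulkAccessConditionConfiguration configuration) {
        return configuration.getItemAccessConditionOptions().stream()
            .collect(Collectors.toMap(AccessConditionOption::getName,
                                      Function.identity()));
    }
}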

View File

@@ -1,406 +1,338 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.bulkedit;

import java.util.ArrayList;
import java.util.List;

import org.dspace.content.Collection;
import org.dspace.content.Item;

/**
 * Utility class to store changes to item that may occur during a batch edit.
 *
 * @author Stuart Lewis
 */
public class BulkEditChange {

    /**
     * The item these changes relate to
     */
    private Item item;

    /**
     * The List of hashtables with the new elements
     */
    private List<BulkEditMetadataValue> adds;

    /**
     * The List of hashtables with the removed elements
     */
    private List<BulkEditMetadataValue> removes;

    /**
     * The List of hashtables with the unchanged elements
     */
    private List<BulkEditMetadataValue> constant;

    /**
     * The List of the complete set of new values (constant + adds)
     */
    private List<BulkEditMetadataValue> complete;

    /**
     * The list of old collections the item used to be mapped to
     */
    private List<Collection> oldMappedCollections;

    /**
     * The list of new collections the item has been mapped into
     */
    private List<Collection> newMappedCollections;

    /**
     * The old owning collection
     */
    private Collection oldOwningCollection;

    /**
     * The new owning collection
     */
    private Collection newOwningCollection;

    /**
     * Is this a new item
     */
    private boolean newItem;

    /**
     * Has this item been deleted?
     */
    private boolean deleted;

    /**
     * Has this item been withdrawn?
     */
    private boolean withdrawn;

    /**
     * Has this item been reinstated?
     */
    private boolean reinstated;

    /**
     * Have any changes actually been made?
     */
    private boolean empty;

    /**
     * Initialise a change holder for a new item
     */
    public BulkEditChange() {
        // Set the item to be null
        item = null;
        newItem = true;
        empty = true;
        oldOwningCollection = null;
        newOwningCollection = null;

        // Initialise the arrays
        adds = new ArrayList<>();
        removes = new ArrayList<>();
        constant = new ArrayList<>();
        complete = new ArrayList<>();
        oldMappedCollections = new ArrayList<>();
        newMappedCollections = new ArrayList<>();
    }

    /**
     * Initialise a new change holder for an existing item
     *
     * @param i The Item to store
     */
    public BulkEditChange(Item i) {
        // Store the item
        item = i;
        newItem = false;
        empty = true;

        // Initialise the arrays
        adds = new ArrayList<>();
        removes = new ArrayList<>();
        constant = new ArrayList<>();
        complete = new ArrayList<>();
        oldMappedCollections = new ArrayList<>();
        newMappedCollections = new ArrayList<>();
    }

    /**
     * Store the item - used when a new item is created
     *
     * @param i The item
     */
    public void setItem(Item i) {
        // Store the item
        item = i;
    }

    /**
     * Add an added metadata value
     *
     * @param dcv The value to add
     */
    public void registerAdd(BulkEditMetadataValue dcv) {
        // Add the added value
        adds.add(dcv);
        complete.add(dcv);
        empty = false;
    }

    /**
     * Add a removed metadata value
     *
     * @param dcv The value to remove
     */
    public void registerRemove(BulkEditMetadataValue dcv) {
        // Add the removed value
        removes.add(dcv);
        empty = false;
    }

    /**
     * Add an unchanged metadata value
     *
     * @param dcv The value to keep unchanged
     */
    public void registerConstant(BulkEditMetadataValue dcv) {
        // Add the unchanged value
        constant.add(dcv);
        complete.add(dcv);
    }

    /**
     * Add a new mapped Collection
     *
     * @param c The new mapped Collection
     */
    public void registerNewMappedCollection(Collection c) {
        // Add the new mapped Collection
        newMappedCollections.add(c);
        empty = false;
    }

    /**
     * Add an old mapped Collection
     *
     * @param c The old mapped Collection
     */
    public void registerOldMappedCollection(Collection c) {
        // Add the old mapped Collection (unless it is already there,
        // or is the old owning collection)
        boolean found = false;

        if ((this.getOldOwningCollection() != null) &&
            (this.getOldOwningCollection().getHandle().equals(c.getHandle()))) {
            found = true;
        }

        for (Collection collection : oldMappedCollections) {
            if (collection.getHandle().equals(c.getHandle())) {
                found = true;
            }
        }

        if (!found) {
            oldMappedCollections.add(c);
            empty = false;
        }
    }

    /**
     * Register a change to the owning collection
     *
     * @param oldC The old owning collection
     * @param newC The new owning collection
     */
    public void changeOwningCollection(Collection oldC, Collection newC) {
        // Store the old owning collection
        oldOwningCollection = oldC;

        // Store the new owning collection
        newOwningCollection = newC;
        empty = false;
    }

    /**
     * Set the owning collection of an item
     *
     * @param newC The new owning collection
     */
    public void setOwningCollection(Collection newC) {
        // Store the new owning collection
        newOwningCollection = newC;
        //empty = false;
    }

    /**
     * Get the DSpace Item that these changes are applicable to.
     *
     * @return The item
     */
    public Item getItem() {
        // Return the item
        return item;
    }

    /**
     * Get the list of elements and their values that have been added.
     *
     * @return the list of elements and their values that have been added.
     */
    public List<BulkEditMetadataValue> getAdds() {
        // Return the array
        return adds;
    }

    /**
     * Get the list of elements and their values that have been removed.
     *
     * @return the list of elements and their values that have been removed.
     */
    public List<BulkEditMetadataValue> getRemoves() {
        // Return the array
        return removes;
    }

    /**
     * Get the list of unchanged values
     *
     * @return the list of unchanged values
     */
    public List<BulkEditMetadataValue> getConstant() {
        // Return the array
        return constant;
    }

    /**
     * Get the list of all values
     *
     * @return the list of all values
     */
    public List<BulkEditMetadataValue> getComplete() {
        // Return the array
        return complete;
    }

    /**
     * Get the list of new mapped Collections
     *
     * @return the list of new mapped collections
     */
    public List<Collection> getNewMappedCollections() {
        // Return the array
        return newMappedCollections;
    }

    /**
     * Get the list of old mapped Collections
     *
     * @return the list of old mapped collections
     */
    public List<Collection> getOldMappedCollections() {
        // Return the array
        return oldMappedCollections;
    }

    /**
     * Get the old owning collection
     *
     * @return the old owning collection
     */
    public Collection getOldOwningCollection() {
        // Return the old owning collection
        return oldOwningCollection;
    }

    /**
     * Get the new owning collection
     *
     * @return the new owning collection
     */
    public Collection getNewOwningCollection() {
        // Return the new owning collection
        return newOwningCollection;
    }

    /**
     * Does this change object represent a new item?
     *
     * @return Whether or not this is for a new item
     */
    public boolean isNewItem() {
        // Return the new item status
        return newItem;
    }

    /**
     * Does this change object represent a deleted item?
     *
     * @return Whether or not this is for a deleted item
     */
    public boolean isDeleted() {
        // Return the deleted status
        return deleted;
    }

    /**
     * Set that this item has been deleted
     */
    public void setDeleted() {
        // Store the setting
        deleted = true;
        empty = false;
    }

    /**
     * Does this change object represent a withdrawn item?
     *
     * @return Whether or not this is for a withdrawn item
     */
    public boolean isWithdrawn() {
        // Return the withdrawn status
        return withdrawn;
    }

    /**
     * Set that this item has been withdrawn
     */
    public void setWithdrawn() {
        // Store the setting
        withdrawn = true;
        empty = false;
    }

    /**
     * Does this change object represent a reinstated item?
     *
     * @return Whether or not this is for a reinstated item
     */
    public boolean isReinstated() {
        // Return the reinstated status
        return reinstated;
    }

    /**
     * Set that this item has been reinstated
     */
    public void setReinstated() {
        // Store the setting
        reinstated = true;
        empty = false;
    }

    /**
     * Have any changes actually been recorded, or is this empty?
     *
     * @return Whether or not changes have been made
     */
    public boolean hasChanges() {
        return !empty;
    }
}
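
A brief sketch of how a caller records an edit with this holder and checks whether anything changed; the metadata values are illustrative.

import org.dspace.app.bulkedit.BulkEditChange;
import org.dspace.app.bulkedit.BulkEditMetadataValue;
import org.dspace.content.Item;

public class BulkEditChangeExample {
    public static BulkEditChange recordTitleAdd(Item item) {
        BulkEditChange change = new BulkEditChange(item);

        // Describe the value being added (illustrative field and value)
        BulkEditMetadataValue newTitle = new BulkEditMetadataValue();
        newTitle.setSchema("dc");
        newTitle.setElement("title");
        newTitle.setValue("A better title");

        change.registerAdd(newTitle);
        return change; // change.hasChanges() now returns true
    }
}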

View File

@@ -1,83 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
/**
* Value class used for metadata value edits used by the bulk edit.
*
* @author kevinvandevelde at atmire.com
*/
public class BulkEditMetadataValue {
private String schema;
private String element;
private String qualifier;
private String language;
private String value;
private String authority;
private int confidence;
public BulkEditMetadataValue() {
}
public void setSchema(String schema) {
this.schema = schema;
}
public void setElement(String element) {
this.element = element;
}
public void setQualifier(String qualifier) {
this.qualifier = qualifier;
}
public void setLanguage(String language) {
this.language = language;
}
public void setValue(String value) {
this.value = value;
}
public void setAuthority(String authority) {
this.authority = authority;
}
public void setConfidence(int confidence) {
this.confidence = confidence;
}
public String getSchema() {
return schema;
}
public String getElement() {
return element;
}
public String getQualifier() {
return qualifier;
}
public String getLanguage() {
return language;
}
public String getValue() {
return value;
}
public String getAuthority() {
return authority;
}
public int getConfidence() {
return confidence;
}
}

File diff suppressed because it is too large

View File

@@ -1,213 +1,176 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.bulkedit;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.UUID;

import org.dspace.authority.AuthorityValue;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityValueService;

/**
 * Utility class to store a line from a CSV file
 *
 * @author Stuart Lewis
 */
public class DSpaceCSVLine implements Serializable {
    /**
     * The item id of the item represented by this line. null is for a new item
     */
    private final UUID id;

    /**
     * The elements in this line in a hashtable, keyed by the metadata type
     */
    private final Map<String, ArrayList> items;

    protected transient final AuthorityValueService authorityValueService
        = AuthorityServiceFactory.getInstance().getAuthorityValueService();

    /**
     * Ensures that the order-sensitive columns of the csv are processed in the correct order
     */
    private transient final Comparator<? super String> headerComparator = new Comparator<String>() {
        @Override
        public int compare(String md1, String md2) {
            // The metadata coming from an external source should be processed after the others
            AuthorityValue source1 = authorityValueService.getAuthorityValueType(md1);
            AuthorityValue source2 = authorityValueService.getAuthorityValueType(md2);

            int compare;
            if (source1 == null && source2 != null) {
                compare = -1;
            } else if (source1 != null && source2 == null) {
                compare = 1;
            } else {
                // the order of the rest does not matter
                compare = md1.compareTo(md2);
            }
            return compare;
        }
    };

    /**
     * Create a new CSV line
     *
     * @param itemId The item ID of the line
     */
    public DSpaceCSVLine(UUID itemId) {
        // Store the ID and initialise the hashtable
        this.id = itemId;
        items = new TreeMap<>(headerComparator);
    }

    /**
     * Create a new CSV line for a new item
     */
    public DSpaceCSVLine() {
        // Set the ID to be null, and initialise the hashtable
        this.id = null;
        this.items = new TreeMap<>(headerComparator);
    }

    /**
     * Get the item ID that this line represents
     *
     * @return The item ID
     */
    public UUID getID() {
        // Return the ID
        return id;
    }

    /**
     * Add a new metadata value to this line
     *
     * @param key The metadata key (e.g. dc.contributor.author)
     * @param value The metadata value
     */
    public void add(String key, String value) {
        // Create the array list if we need to
        if (items.get(key) == null) {
            items.put(key, new ArrayList<String>());
        }

        // Store the item if it is not null
        if (value != null) {
            items.get(key).add(value);
        }
    }

    /**
     * Get all the values that match the given metadata key. Will be null if none exist.
     *
     * @param key The metadata key
     * @return All the elements that match
     */
    public List<String> get(String key) {
        // Return any relevant values
        return items.get(key);
    }

    /**
     * Get any action associated with this line
     *
     * @return The action (may be blank, 'withdraw', 'reinstate' or 'delete')
     */
    public String getAction() {
        if (items.containsKey("action")) {
            ArrayList actions = items.get("action");
            if (actions.size() > 0) {
                return ((String) actions.get(0)).trim();
            }
        }
        return "";
    }

    /**
     * Get all the metadata keys that are represented in this line
     *
     * @return An enumeration of all the keys
     */
    public Set<String> keys() {
        // Return the keys
        return items.keySet();
    }

    /**
     * Write this line out as a CSV formatted string, in the order given by the headings provided
     *
     * @param headings The headings which define the order the elements must be presented in
     * @param fieldSeparator separator between metadata fields
     * @param valueSeparator separator between metadata values (within a field)
     * @return The CSV formatted String
     */
    protected String toCSV(List<String> headings, String fieldSeparator, String valueSeparator) {
        StringBuilder bits = new StringBuilder();

        // Add the id
        bits.append("\"").append(id).append("\"").append(fieldSeparator);
        bits.append(valueToCSV(items.get("collection"), valueSeparator));

        // Add the rest of the elements
        for (String heading : headings) {
            bits.append(fieldSeparator);
            List<String> values = items.get(heading);
            if (values != null && !"collection".equals(heading)) {
                bits.append(valueToCSV(values, valueSeparator));
            }
        }

        return bits.toString();
    }

    /**
     * Internal method to create a CSV formatted String joining a given set of elements
     *
     * @param values The values to create the string from
     * @param valueSeparator value separator
     * @return The line as a CSV formatted String
     */
    protected String valueToCSV(List<String> values, String valueSeparator) {
        // Check there is some content
        if (values == null) {
            return "";
        }

        // Get on with the work
        String s;
        if (values.size() == 1) {
            s = values.get(0);
        } else {
            // Concatenate any fields together
            StringBuilder str = new StringBuilder();
            for (String value : values) {
                if (str.length() > 0) {
                    str.append(valueSeparator);
                }
                str.append(value);
            }
            s = str.toString();
        }

        // Replace internal quotes with two sets of quotes
        return "\"" + s.replaceAll("\"", "\"\"") + "\"";
    }
}
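
To make the quoting rule concrete, a small sketch of the escaping performed by valueToCSV: multiple values are joined with the value separator and internal quotes are doubled. The class is declared in the same package because valueToCSV is protected; "||" is assumed as the (configurable) value separator, and the sketch must run inside a deployed DSpace, since the header comparator needs the authority service.

package org.dspace.app.bulkedit;

public class ValueToCsvExample {
    public static void main(String[] args) {
        DSpaceCSVLine line = new DSpaceCSVLine();
        line.add("dc.contributor.author", "Smith, John");
        line.add("dc.contributor.author", "Jo\"nes, Jane");
        // Joined with "||", internal quotes doubled, result wrapped in quotes:
        System.out.println(line.valueToCSV(line.get("dc.contributor.author"), "||"));
        // prints: "Smith, John||Jo""nes, Jane"
    }
}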

View File

@@ -1,115 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import java.sql.SQLException;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.ArrayUtils;
import org.dspace.content.MetadataField;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataValueService;
import org.dspace.core.Context;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.utils.DSpace;
/**
* {@link DSpaceRunnable} implementation to delete all the values of the given
* metadata field.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class MetadataDeletion extends DSpaceRunnable<MetadataDeletionScriptConfiguration<MetadataDeletion>> {
private MetadataValueService metadataValueService;
private MetadataFieldService metadataFieldService;
private ConfigurationService configurationService;
private String metadataField;
private boolean list;
@Override
public void internalRun() throws Exception {
if (list) {
listErasableMetadata();
return;
}
Context context = new Context();
try {
context.turnOffAuthorisationSystem();
performMetadataValuesDeletion(context);
} finally {
context.restoreAuthSystemState();
context.complete();
}
}
private void listErasableMetadata() {
String[] erasableMetadata = getErasableMetadata();
if (ArrayUtils.isEmpty(erasableMetadata)) {
handler.logInfo("No fields has been configured to be cleared via bulk deletion");
} else {
handler.logInfo("The fields that can be bulk deleted are: " + String.join(", ", erasableMetadata));
}
}
private void performMetadataValuesDeletion(Context context) throws SQLException {
MetadataField field = metadataFieldService.findByString(context, metadataField, '.');
if (field == null) {
throw new IllegalArgumentException("No metadata field found with name " + metadataField);
}
if (!ArrayUtils.contains(getErasableMetadata(), metadataField)) {
throw new IllegalArgumentException("The given metadata field cannot be bulk deleted");
}
handler.logInfo(String.format("Deleting the field '%s' from all objects", metadataField));
metadataValueService.deleteByMetadataField(context, field);
}
private String[] getErasableMetadata() {
return configurationService.getArrayProperty("bulkedit.allow-bulk-deletion");
}
@Override
@SuppressWarnings("unchecked")
public MetadataDeletionScriptConfiguration<MetadataDeletion> getScriptConfiguration() {
return new DSpace().getServiceManager()
.getServiceByName("metadata-deletion", MetadataDeletionScriptConfiguration.class);
}
@Override
public void setup() throws ParseException {
metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService();
metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();
configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
metadataField = commandLine.getOptionValue('m');
list = commandLine.hasOption('l');
if (!list && metadataField == null) {
throw new ParseException("One of the following parameters is required: -m or -l");
}
}
}
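As a usage sketch: the bulkedit.allow-bulk-deletion property read by getErasableMetadata() above gates which fields may be cleared. The launcher path and the field value here are assumptions for illustration; the script name comes from the getServiceByName("metadata-deletion", ...) wiring above.

# local.cfg — fields that may be bulk-deleted
bulkedit.allow-bulk-deletion = dc.description.provenance

# list the erasable fields, then clear one of them everywhere
[dspace]/bin/dspace metadata-deletion -l
[dspace]/bin/dspace metadata-deletion -m dc.description.provenance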

View File

@@ -1,18 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
/**
* The {@link MetadataDeletion} for CLI.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class MetadataDeletionCli extends MetadataDeletion {
}

View File

@@ -1,18 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
/**
* Script configuration for {@link MetadataDeletionCli}.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class MetadataDeletionCliScriptConfiguration extends MetadataDeletionScriptConfiguration<MetadataDeletionCli> {
}

View File

@@ -1,49 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.apache.commons.cli.Options;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
* The {@link ScriptConfiguration} for the {@link MetadataDeletion} script.
*/
public class MetadataDeletionScriptConfiguration<T extends MetadataDeletion> extends ScriptConfiguration<T> {
private Class<T> dspaceRunnableClass;
@Override
public Options getOptions() {
if (options == null) {
Options options = new Options();
options.addOption("m", "metadata", true, "metadata field name");
options.addOption("l", "list", false, "lists the metadata fields that can be deleted");
super.options = options;
}
return options;
}
@Override
public Class<T> getDspaceRunnableClass() {
return dspaceRunnableClass;
}
/**
* Generic setter for the dspaceRunnableClass
* @param dspaceRunnableClass The dspaceRunnableClass to be set on this MetadataDeletionScriptConfiguration
*/
@Override
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
this.dspaceRunnableClass = dspaceRunnableClass;
}
}

View File

@@ -7,116 +7,260 @@
[removed]
 */
package org.dspace.app.bulkedit;

import java.sql.SQLException;
import java.util.UUID;

import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.util.factory.UtilServiceFactory;
import org.dspace.app.util.service.DSpaceObjectUtils;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataDSpaceCsvExportService;
import org.dspace.core.Context;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.utils.DSpace;

/**
 * Metadata exporter to allow the batch export of metadata into a file
 *
 * @author Stuart Lewis
 */
public class MetadataExport extends DSpaceRunnable<MetadataExportScriptConfiguration> {

    private boolean help = false;
    private String filename = null;
    private String identifier = null;
    private boolean exportAllMetadata = false;
    private boolean exportAllItems = false;

    private static final String EXPORT_CSV = "exportCSV";

    private MetadataDSpaceCsvExportService metadataDSpaceCsvExportService = new DSpace().getServiceManager()
        .getServicesByType(MetadataDSpaceCsvExportService.class).get(0);

    private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();

    private DSpaceObjectUtils dSpaceObjectUtils = UtilServiceFactory.getInstance().getDSpaceObjectUtils();

    @Override
    public void internalRun() throws Exception {
        if (help) {
            logHelpInfo();
            printHelp();
            return;
        }
        Context context = new Context();
        context.turnOffAuthorisationSystem();
        try {
            context.setCurrentUser(ePersonService.find(context, this.getEpersonIdentifier()));
        } catch (SQLException e) {
            handler.handleException(e);
        }
        DSpaceCSV dSpaceCSV = metadataDSpaceCsvExportService
            .handleExport(context, exportAllItems, exportAllMetadata, identifier, handler);
        handler.writeFilestream(context, filename, dSpaceCSV.getInputStream(), EXPORT_CSV);
        context.restoreAuthSystemState();
        context.complete();
    }

    protected void logHelpInfo() {
        handler.logInfo("\nfull export: metadata-export");
        handler.logInfo("partial export: metadata-export -i handle/UUID");
    }

    @Override
    public MetadataExportScriptConfiguration getScriptConfiguration() {
        return new DSpace().getServiceManager().getServiceByName("metadata-export",
            MetadataExportScriptConfiguration.class);
    }

    @Override
    public void setup() throws ParseException {
        if (commandLine.hasOption('h')) {
            help = true;
            return;
        }
        if (!commandLine.hasOption('i')) {
            exportAllItems = true;
        }
        identifier = commandLine.getOptionValue('i');
        filename = getFileNameForExportFile();
        exportAllMetadata = commandLine.hasOption('a');
    }

    protected String getFileNameForExportFile() throws ParseException {
        Context context = new Context();
        try {
            DSpaceObject dso = null;
            if (StringUtils.isNotBlank(identifier)) {
                dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, identifier);
                if (dso == null) {
                    dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(identifier));
                }
            } else {
                dso = ContentServiceFactory.getInstance().getSiteService().findSite(context);
            }
            if (dso == null) {
                throw new ParseException("An identifier was given that wasn't able to be parsed to a DSpaceObject");
            }
            return dso.getID().toString() + ".csv";
        } catch (SQLException e) {
            handler.handleException("Something went wrong trying to retrieve DSO for identifier: " + identifier, e);
        }
        return null;
    }
}

[added]
 */
package org.dspace.app.bulkedit;

import org.apache.commons.cli.*;

import org.dspace.content.*;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;

import java.util.ArrayList;
import java.sql.SQLException;
import java.util.List;

/**
 * Metadata exporter to allow the batch export of metadata into a file
 *
 * @author Stuart Lewis
 */
public class MetadataExport
{
    /** The items to export */
    private ItemIterator toExport;

    /** Whether to export all metadata, or just normally edited metadata */
    private boolean exportAll;

    /**
     * Set up a new metadata export
     *
     * @param c The Context
     * @param toExport The ItemIterator of items to export
     * @param exportAll whether to export all metadata or not (include handle, provenance etc)
     */
    public MetadataExport(Context c, ItemIterator toExport, boolean exportAll)
    {
        // Store the export settings
        this.toExport = toExport;
        this.exportAll = exportAll;
    }

    /**
     * Method to export a community (and sub-communities and collections)
     *
     * @param c The Context
     * @param toExport The Community to export
     * @param exportAll whether to export all metadata or not (include handle, provenance etc)
     */
    public MetadataExport(Context c, Community toExport, boolean exportAll)
    {
        try
        {
            // Try to export the community
            this.toExport = new ItemIterator(c, buildFromCommunity(toExport, new ArrayList<Integer>(), 0));
            this.exportAll = exportAll;
        }
        catch (SQLException sqle)
        {
            // Something went wrong...
            System.err.println("Error running exporter:");
            sqle.printStackTrace(System.err);
            System.exit(1);
        }
    }

    /**
     * Build an array list of item ids that are in a community (include sub-communities and collections)
     *
     * @param community The community to build from
     * @param itemIDs The itemID (used for recursion - use an empty ArrayList)
     * @param indent How many spaces to use when writing out the names of items added
     * @return The list of item ids
     * @throws SQLException
     */
    private List<Integer> buildFromCommunity(Community community, List<Integer> itemIDs, int indent)
        throws SQLException
    {
        // Add all the collections
        Collection[] collections = community.getCollections();
        for (Collection collection : collections)
        {
            for (int i = 0; i < indent; i++)
            {
                System.out.print(" ");
            }
            ItemIterator items = collection.getAllItems();
            while (items.hasNext())
            {
                int id = items.next().getID();
                // Only add if not already included (so mapped items only appear once)
                if (!itemIDs.contains(id))
                {
                    itemIDs.add(id);
                }
            }
        }

        // Add all the sub-communities
        Community[] communities = community.getSubcommunities();
        for (Community subCommunity : communities)
        {
            for (int i = 0; i < indent; i++)
            {
                System.out.print(" ");
            }
            buildFromCommunity(subCommunity, itemIDs, indent + 1);
        }

        return itemIDs;
    }

    /**
     * Run the export
     *
     * @return the exported CSV lines
     */
    public DSpaceCSV export()
    {
        try
        {
            // Process each item
            DSpaceCSV csv = new DSpaceCSV(exportAll);
            while (toExport.hasNext())
            {
                csv.addItem(toExport.next());
            }

            // Return the results
            return csv;
        }
        catch (Exception e)
        {
            return null;
        }
    }

    /**
     * Print the help message
     *
     * @param options The command line options the user gave
     * @param exitCode the system exit code to use
     */
    private static void printHelp(Options options, int exitCode)
    {
        // print the help message
        HelpFormatter myhelp = new HelpFormatter();
        myhelp.printHelp("MetadataExport\n", options);
        System.out.println("\nfull export: metadataexport -f filename");
        System.out.println("partial export: metadataexport -i handle -f filename");
        System.exit(exitCode);
    }

    /**
     * main method to run the metadata exporter
     *
     * @param argv the command line arguments given
     */
    public static void main(String[] argv) throws Exception
    {
        // Create an options object and populate it
        CommandLineParser parser = new PosixParser();

        Options options = new Options();

        options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)");
        options.addOption("f", "file", true, "destination where you want file written");
        options.addOption("a", "all", false, "include all metadata fields that are not normally changed (e.g. provenance)");
        options.addOption("h", "help", false, "help");

        CommandLine line = null;

        try
        {
            line = parser.parse(options, argv);
        }
        catch (ParseException pe)
        {
            System.err.println("Error with commands.");
            printHelp(options, 1);
            System.exit(0);
        }

        if (line.hasOption('h'))
        {
            printHelp(options, 0);
        }

        // Check a filename is given
        if (!line.hasOption('f'))
        {
            System.err.println("Required parameter -f missing!");
            printHelp(options, 1);
        }
        String filename = line.getOptionValue('f');

        // Create a context
        Context c = new Context();
        c.turnOffAuthorisationSystem();

        // The things we'll export
        ItemIterator toExport = null;
        MetadataExport exporter = null;

        // Export everything?
        boolean exportAll = line.hasOption('a');

        // Check we have an item OK
        if (!line.hasOption('i'))
        {
            System.out.println("Exporting whole repository WARNING: May take some time!");
            exporter = new MetadataExport(c, Item.findAll(c), exportAll);
        }
        else
        {
            String handle = line.getOptionValue('i');
            DSpaceObject dso = HandleManager.resolveToObject(c, handle);
            if (dso == null)
            {
                System.err.println("Item '" + handle + "' does not resolve to an item in your repository!");
                printHelp(options, 1);
            }

            if (dso.getType() == Constants.ITEM)
            {
                System.out.println("Exporting item '" + dso.getName() + "' (" + handle + ")");
                List<Integer> item = new ArrayList<Integer>();
                item.add(dso.getID());
                exporter = new MetadataExport(c, new ItemIterator(c, item), exportAll);
            }
            else if (dso.getType() == Constants.COLLECTION)
            {
                System.out.println("Exporting collection '" + dso.getName() + "' (" + handle + ")");
                Collection collection = (Collection)dso;
                toExport = collection.getAllItems();
                exporter = new MetadataExport(c, toExport, exportAll);
            }
            else if (dso.getType() == Constants.COMMUNITY)
            {
                System.out.println("Exporting community '" + dso.getName() + "' (" + handle + ")");
                exporter = new MetadataExport(c, (Community)dso, exportAll);
            }
            else
            {
                System.err.println("Error identifying '" + handle + "'");
                System.exit(1);
            }
        }

        // Perform the export
        DSpaceCSV csv = exporter.export();

        // Save the files to the file
        csv.save(filename);

        // Finish off and tidy up
        c.restoreAuthSystemState();
        c.complete();
    }
}
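For orientation, the two versions above are driven differently from the command line; a hedged sketch, with the launcher path and handle as illustrative assumptions:

# added (1.7.0) version — main() above, -f is required:
[dspace]/bin/dspace metadata-export -f export.csv
[dspace]/bin/dspace metadata-export -i 123456789/1 -f export.csv

# removed (script-based) version — the filename is derived from the DSO UUID,
# unless the CLI subclass further below supplies -f:
[dspace]/bin/dspace metadata-export -i 123456789/1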

View File

@@ -1,33 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.apache.commons.cli.ParseException;
public class MetadataExportCli extends MetadataExport {
@Override
protected String getFileNameForExportFile() {
return commandLine.getOptionValue('f');
}
@Override
public void setup() throws ParseException {
super.setup();
// Check a filename is given
if (!commandLine.hasOption('f')) {
throw new ParseException("Required parameter -f missing!");
}
}
@Override
protected void logHelpInfo() {
handler.logInfo("\nfull export: metadata-export -f filename");
handler.logInfo("partial export: metadata-export -i handle -f filename");
}
}

View File

@@ -1,26 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import java.io.OutputStream;
import org.apache.commons.cli.Options;
public class MetadataExportCliScriptConfiguration extends MetadataExportScriptConfiguration<MetadataExportCli> {
@Override
public Options getOptions() {
Options options = super.getOptions();
options.addOption("f", "file", true, "destination where you want file written");
options.getOption("f").setType(OutputStream .class);
options.getOption("f").setRequired(true);
super.options = options;
return options;
}
}

View File

@@ -1,182 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.MetadataDSpaceCsvExportServiceImpl;
import org.dspace.content.MetadataField;
import org.dspace.content.factory.ContentReportServiceFactory;
import org.dspace.content.service.MetadataDSpaceCsvExportService;
import org.dspace.contentreport.Filter;
import org.dspace.contentreport.FilteredItems;
import org.dspace.contentreport.FilteredItemsQuery;
import org.dspace.contentreport.QueryOperator;
import org.dspace.contentreport.QueryPredicate;
import org.dspace.contentreport.service.ContentReportService;
import org.dspace.core.Context;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.kernel.ServiceManager;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.services.ConfigurationService;
import org.dspace.utils.DSpace;
/**
* Metadata exporter to allow the batch export of metadata from a Filtered Items content report execution into a file
*
* @author Jean-François Morin (Université Laval)
*/
public class MetadataExportFilteredItemsReport extends DSpaceRunnable
<MetadataExportFilteredItemsReportScriptConfiguration<MetadataExportFilteredItemsReport>> {
private static final String EXPORT_CSV = "exportCSV";
public static final String DEFAULT_FILENAME = "metadataExportFilteredItems.csv";
private boolean help = false;
private String[] collectionUuids;
private String[] queryPredicates;
private String[] queryFilters;
private ConfigurationService configurationService;
private ContentReportService contentReportService;
private EPersonService ePersonService;
private MetadataDSpaceCsvExportService metadataDSpaceCsvExportService;
@SuppressWarnings("unchecked")
@Override
public MetadataExportFilteredItemsReportScriptConfiguration<MetadataExportFilteredItemsReport>
getScriptConfiguration() {
return new DSpace().getServiceManager()
.getServiceByName("metadata-export-filtered-items-report",
MetadataExportFilteredItemsReportScriptConfiguration.class);
}
@Override
public void setup() throws ParseException {
ServiceManager serviceManager = new DSpace().getServiceManager();
configurationService = serviceManager.getServicesByType(ConfigurationService.class).get(0);
contentReportService = ContentReportServiceFactory.getInstance().getContentReportService();
ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
metadataDSpaceCsvExportService = serviceManager.getServiceByName(
MetadataDSpaceCsvExportServiceImpl.class.getCanonicalName(),
MetadataDSpaceCsvExportService.class);
if (commandLine.hasOption('h')) {
help = true;
return;
}
if (commandLine.hasOption('c')) {
collectionUuids = commandLine.getOptionValues('c');
}
if (commandLine.hasOption("qp")) {
queryPredicates = commandLine.getOptionValues("qp");
}
if (commandLine.hasOption('f')) {
queryFilters = commandLine.getOptionValues('f');
}
}
@Override
public void internalRun() throws Exception {
if (help) {
loghelpinfo();
printHelp();
return;
}
handler.logDebug("starting content report export");
Context context = new Context();
context.setCurrentUser(ePersonService.find(context, getEpersonIdentifier()));
List<String> collUuids = List.of();
if (collectionUuids != null) {
// Using a temporary Set to eliminate duplicates, if any
Set<String> setUuids = arrayToStream(collectionUuids)
.map(uuids -> uuids.split("[^0-9A-Fa-f\\-]+"))
.flatMap(Arrays::stream)
.filter(StringUtils::isNotBlank)
.collect(Collectors.toSet());
collUuids = new ArrayList<>(setUuids);
}
List<QueryPredicate> predicates = List.of();
if (queryPredicates != null) {
predicates = arrayToStream(queryPredicates)
.filter(StringUtils::isNotBlank)
.map(pred -> buildPredicate(context, pred))
.collect(Collectors.toList());
}
Set<Filter> filters = EnumSet.noneOf(Filter.class);
if (queryFilters != null) {
Arrays.stream(queryFilters)
.map(Filter::getFilters)
.flatMap(Set::stream)
.filter(f -> f != null)
.forEach(filters::add);
}
handler.logDebug("building query");
FilteredItemsQuery query = FilteredItemsQuery.of(
collUuids, predicates, 0, Integer.MAX_VALUE, filters, List.of());
handler.logDebug("creating iterator");
FilteredItems items = contentReportService.findFilteredItems(context, query);
handler.logDebug("creating dspacecsv");
DSpaceCSV dSpaceCSV = metadataDSpaceCsvExportService.export(context, items.getItems().iterator(),
true, handler);
handler.logDebug("writing to file " + getFileNameOrExportFile());
handler.writeFilestream(context, getFileNameOrExportFile(), dSpaceCSV.getInputStream(), EXPORT_CSV);
context.restoreAuthSystemState();
context.complete();
}
protected void loghelpinfo() {
handler.logInfo("metadata-export-filtered-items-report");
}
protected String getFileNameOrExportFile() {
return configurationService.getProperty("contentreport.metadataquery.csv.filename.default", DEFAULT_FILENAME);
}
private static Stream<String> arrayToStream(String... array) {
return Optional.ofNullable(array)
.stream()
.flatMap(Arrays::stream)
.filter(StringUtils::isNotBlank);
}
private QueryPredicate buildPredicate(Context context, String exp) {
String[] tokens = exp.split("\\:");
String field = tokens.length > 0 ? tokens[0].trim() : "";
QueryOperator operator = tokens.length > 1 ? QueryOperator.get(tokens[1].trim()) : null;
String value = tokens.length > 2 ? StringUtils.trimToEmpty(tokens[2]) : "";
try {
List<MetadataField> fields = contentReportService.getMetadataFields(context, field);
return QueryPredicate.of(fields, operator, value);
} catch (SQLException e) {
throw new IllegalArgumentException(e.getMessage(), e);
}
}
}
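Since buildPredicate() above splits each -qp value on ':' into field, operator, and value, an invocation might look like the following sketch. The UUID, operator name, and filter names are illustrative assumptions; valid operators come from the QueryOperator enumeration and valid filters from org.dspace.contentreport.Filter.

[dspace]/bin/dspace metadata-export-filtered-items-report \
    -c 8a9b0c1d-2e3f-4a5b-8c7d-9e0f1a2b3c4d \
    -qp dc.title:equals:Thesis \
    -f is_item,is_discoverable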

View File

@@ -1,29 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import java.util.Optional;
import org.apache.commons.lang3.StringUtils;
/**
* The CLI version of the {@link MetadataExportFilteredItemsReport} script
*
* @author Jean-François Morin (Université Laval)
*/
public class MetadataExportFilteredItemsReportCli extends MetadataExportFilteredItemsReport {
@Override
protected String getFileNameOrExportFile() {
return Optional.ofNullable(commandLine.getOptionValue('n'))
.filter(StringUtils::isNotBlank)
.orElseGet(() -> super.getFileNameOrExportFile());
}
}

View File

@@ -1,36 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.apache.commons.cli.Options;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* This is the CLI version of the {@link MetadataExportFilteredItemsReportScriptConfiguration} class that handles the
* configuration for the {@link MetadataExportFilteredItemsReportCli} script
*
* @author Jean-François Morin (Université Laval)
*/
public class MetadataExportFilteredItemsReportCliScriptConfiguration
extends MetadataExportFilteredItemsReportScriptConfiguration<MetadataExportFilteredItemsReportCli> {
@Autowired
private ConfigurationService configurationService;
@Override
public Options getOptions() {
Options options = super.getOptions();
String filename = configurationService.getProperty("contentreport.metadataquery.csv.filename.default",
MetadataExportFilteredItemsReport.DEFAULT_FILENAME);
options.addOption("n", "filename", true, "the filename to export to (default: " + filename + ")");
return options;
}
}

View File

@@ -1,56 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.apache.commons.cli.Options;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
* The {@link ScriptConfiguration} for the {@link MetadataExportFilteredItemsReport} script
*
* @author Jean-François Morin (Université Laval)
*/
public class MetadataExportFilteredItemsReportScriptConfiguration<T extends MetadataExportFilteredItemsReport>
extends ScriptConfiguration<T> {
private Class<T> dspaceRunnableclass;
@Override
public Class<T> getDspaceRunnableClass() {
return dspaceRunnableclass;
}
@Override
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
dspaceRunnableclass = dspaceRunnableClass;
}
@Override
public Options getOptions() {
if (options == null) {
Options options = new Options();
options.addOption("c", "collections", true,
"UUIDs of collections to search for eligible records");
options.getOption("c").setType(String.class);
options.addOption("qp", "queryPredicates", true,
"Predicates or field queries used as criteria to filter records");
options.getOption("qp").setType(String.class);
options.addOption("f", "filters", true, """
Filters from the org.dspace.contentreport.Filter enumeration
                    used to filter records. Any filter included here is considered selected;
                    any filter omitted is considered unselected.""");
options.getOption("f").setType(String.class);
options.addOption("h", "help", false, "help");
super.options = options;
}
return options;
}
}

View File

@@ -1,50 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.apache.commons.cli.Options;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
* The {@link ScriptConfiguration} for the {@link MetadataExport} script
*/
public class MetadataExportScriptConfiguration<T extends MetadataExport> extends ScriptConfiguration<T> {
private Class<T> dspaceRunnableClass;
@Override
public Class<T> getDspaceRunnableClass() {
return dspaceRunnableClass;
}
/**
* Generic setter for the dspaceRunnableClass
* @param dspaceRunnableClass The dspaceRunnableClass to be set on this MetadataExportScriptConfiguration
*/
@Override
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
this.dspaceRunnableClass = dspaceRunnableClass;
}
@Override
public Options getOptions() {
if (options == null) {
Options options = new Options();
options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)");
options.addOption("a", "all", false,
"include all metadata fields that are not normally changed (e.g. provenance)");
options.addOption("h", "help", false, "help");
super.options = options;
}
return options;
}
}

View File

@@ -1,182 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.DefaultParser.Builder;
import org.apache.commons.cli.ParseException;
import org.dspace.content.Item;
import org.dspace.content.MetadataDSpaceCsvExportServiceImpl;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.MetadataDSpaceCsvExportService;
import org.dspace.core.Context;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationService;
import org.dspace.discovery.indexobject.IndexableCollection;
import org.dspace.discovery.indexobject.IndexableCommunity;
import org.dspace.discovery.utils.DiscoverQueryBuilder;
import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.sort.SortOption;
import org.dspace.utils.DSpace;
/**
* Metadata exporter to allow the batch export of metadata from a discovery search into a file
*
*/
public class MetadataExportSearch extends DSpaceRunnable<MetadataExportSearchScriptConfiguration> {
private static final String EXPORT_CSV = "exportCSV";
private boolean help = false;
private String identifier;
private String discoveryConfigName;
private String[] filterQueryStrings;
private boolean hasScope = false;
private String query;
private SearchService searchService;
private MetadataDSpaceCsvExportService metadataDSpaceCsvExportService;
private EPersonService ePersonService;
private DiscoveryConfigurationService discoveryConfigurationService;
private CommunityService communityService;
private CollectionService collectionService;
private DiscoverQueryBuilder queryBuilder;
@Override
public MetadataExportSearchScriptConfiguration getScriptConfiguration() {
return new DSpace().getServiceManager()
.getServiceByName("metadata-export-search", MetadataExportSearchScriptConfiguration.class);
}
@Override
public void setup() throws ParseException {
searchService = SearchUtils.getSearchService();
metadataDSpaceCsvExportService = new DSpace().getServiceManager()
.getServiceByName(
MetadataDSpaceCsvExportServiceImpl.class.getCanonicalName(),
MetadataDSpaceCsvExportService.class
);
ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
discoveryConfigurationService = SearchUtils.getConfigurationService();
communityService = ContentServiceFactory.getInstance().getCommunityService();
collectionService = ContentServiceFactory.getInstance().getCollectionService();
queryBuilder = SearchUtils.getQueryBuilder();
if (commandLine.hasOption('h')) {
help = true;
return;
}
if (commandLine.hasOption('q')) {
query = commandLine.getOptionValue('q');
}
if (commandLine.hasOption('s')) {
hasScope = true;
identifier = commandLine.getOptionValue('s');
}
if (commandLine.hasOption('c')) {
discoveryConfigName = commandLine.getOptionValue('c');
}
if (commandLine.hasOption('f')) {
filterQueryStrings = commandLine.getOptionValues('f');
}
}
@Override
public void internalRun() throws Exception {
if (help) {
loghelpinfo();
printHelp();
return;
}
handler.logDebug("starting search export");
IndexableObject dso = null;
Context context = new Context();
context.setCurrentUser(ePersonService.find(context, this.getEpersonIdentifier()));
if (hasScope) {
dso = resolveScope(context, identifier);
}
DiscoveryConfiguration discoveryConfiguration =
discoveryConfigurationService.getDiscoveryConfiguration(discoveryConfigName);
List<QueryBuilderSearchFilter> queryBuilderSearchFilters = new ArrayList<>();
handler.logDebug("processing filter queries");
if (filterQueryStrings != null) {
for (String filterQueryString: filterQueryStrings) {
String field = filterQueryString.split(",", 2)[0];
String operator = filterQueryString.split("(,|=)", 3)[1];
String value = filterQueryString.split("=", 2)[1];
QueryBuilderSearchFilter queryBuilderSearchFilter =
new QueryBuilderSearchFilter(field, operator, value);
queryBuilderSearchFilters.add(queryBuilderSearchFilter);
}
}
handler.logDebug("building query");
        DiscoverQuery discoverQuery =
            queryBuilder.buildQuery(context, dso, discoveryConfiguration, query, queryBuilderSearchFilters,
                // offset 0; the previous Long.getLong("0") read a system property and was effectively null
                "Item", 10, 0L, null, SortOption.DESCENDING);
handler.logDebug("creating iterator");
Iterator<Item> itemIterator = searchService.iteratorSearch(context, dso, discoverQuery);
handler.logDebug("creating dspacecsv");
DSpaceCSV dSpaceCSV = metadataDSpaceCsvExportService.export(context, itemIterator, true, handler);
handler.logDebug("writing to file " + getFileNameOrExportFile());
handler.writeFilestream(context, getFileNameOrExportFile(), dSpaceCSV.getInputStream(), EXPORT_CSV);
context.restoreAuthSystemState();
context.complete();
}
    protected void loghelpinfo() {
        handler.logInfo("metadata-export-search");
    }
protected String getFileNameOrExportFile() {
return "metadataExportSearch.csv";
}
public IndexableObject resolveScope(Context context, String id) throws SQLException {
UUID uuid = UUID.fromString(id);
IndexableObject scopeObj = new IndexableCommunity(communityService.find(context, uuid));
if (scopeObj.getIndexedObject() == null) {
scopeObj = new IndexableCollection(collectionService.find(context, uuid));
}
return scopeObj;
}
@Override
    protected StepResult parse(String[] args) throws ParseException {
        // Parse once, keeping leading/trailing quotes so quoted filter values survive intact
        Builder builder = DefaultParser.builder();
        builder.setStripLeadingAndTrailingQuotes(false);
        commandLine = builder.build().parse(getScriptConfiguration().getOptions(), args);
        setup();
        return StepResult.Continue;
    }
}
}

View File

@@ -1,20 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
/**
* The cli version of the {@link MetadataExportSearch} script
*/
public class MetadataExportSearchCli extends MetadataExportSearch {
@Override
protected String getFileNameOrExportFile() {
return commandLine.getOptionValue('n');
}
}

View File

@@ -1,26 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.apache.commons.cli.Options;
/**
* This is the CLI version of the {@link MetadataExportSearchScriptConfiguration} class that handles the
* configuration for the {@link MetadataExportSearchCli} script
*/
public class MetadataExportSearchCliScriptConfiguration
extends MetadataExportSearchScriptConfiguration<MetadataExportSearchCli> {
@Override
    public Options getOptions() {
        Options options = super.getOptions();
        options.addOption("n", "filename", true, "the filename to export to");
        return options;
    }
}
}

View File

@@ -1,56 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.apache.commons.cli.Options;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
* The {@link ScriptConfiguration} for the {@link MetadataExportSearch} script
*/
public class MetadataExportSearchScriptConfiguration<T extends MetadataExportSearch> extends ScriptConfiguration<T> {
private Class<T> dspaceRunnableclass;
@Override
public Class<T> getDspaceRunnableClass() {
return dspaceRunnableclass;
}
@Override
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
this.dspaceRunnableclass = dspaceRunnableClass;
}
@Override
public Options getOptions() {
if (options == null) {
Options options = new Options();
options.addOption("q", "query", true,
"The discovery search string to will be used to match records. Not URL encoded");
options.getOption("q").setType(String.class);
options.addOption("s", "scope", true,
"UUID of a specific DSpace container (site, community or collection) to which the search has to be " +
"limited");
options.getOption("s").setType(String.class);
options.addOption("c", "configuration", true,
"The name of a Discovery configuration that should be used by this search");
options.getOption("c").setType(String.class);
options.addOption("f", "filter", true,
"Advanced search filter that has to be used to filter the result set, with syntax `<:filter-name>," +
"<:filter-operator>=<:filter-value>`. Not URL encoded. For example `author," +
"authority=5df05073-3be7-410d-8166-e254369e4166` or `title,contains=sample text`");
options.getOption("f").setType(String.class);
options.addOption("h", "help", false, "help");
super.options = options;
}
return options;
}
}
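Putting the options above together, a hedged invocation sketch for the search export; the launcher path, scope UUID, query, and filter value are illustrative:

[dspace]/bin/dspace metadata-export-search \
    -q "sample text" \
    -s 8a9b0c1d-2e3f-4a5b-8c7d-9e0f1a2b3c4d \
    -f "title,contains=sample text"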

View File

@@ -1,68 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.UUID;
import org.apache.commons.cli.ParseException;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.scripts.handler.DSpaceRunnableHandler;
/**
 * CLI variant for the {@link MetadataImport} class.
 * This exists so that the behaviour of the determineChange method can be made specific to the CLI.
*/
public class MetadataImportCLI extends MetadataImport {
@Override
protected boolean determineChange(DSpaceRunnableHandler handler) throws IOException {
handler.logInfo("Do you want to make these changes? [y/n] ");
try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(System.in))) {
String yn = bufferedReader.readLine();
if ("y".equalsIgnoreCase(yn)) {
return true;
}
return false;
}
}
@Override
protected void assignCurrentUserInContext(Context context) throws ParseException {
try {
if (commandLine.hasOption('e')) {
EPerson eperson;
String e = commandLine.getOptionValue('e');
if (e.indexOf('@') != -1) {
eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, e);
} else {
eperson = EPersonServiceFactory.getInstance().getEPersonService().find(context, UUID.fromString(e));
}
if (eperson == null) {
throw new ParseException("Error, eperson cannot be found: " + e);
}
context.setCurrentUser(eperson);
}
} catch (Exception e) {
throw new ParseException("Unable to find DSpace user: " + e.getMessage());
}
}
@Override
public void setup() throws ParseException {
super.setup();
if (!commandLine.hasOption('e')) {
throw new ParseException("Required parameter -e missing!");
}
}
}
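A hedged usage sketch for the import; the launcher path, CSV path, and e-mail address are illustrative assumptions. The -e option is enforced by setup() above, and -s (from the script configuration further below) skips the interactive [y/n] prompt implemented in determineChange():

[dspace]/bin/dspace metadata-import -f /tmp/changes.csv -e admin@example.com
[dspace]/bin/dspace metadata-import -f /tmp/changes.csv -e admin@example.com -s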

View File

@@ -1,26 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.apache.commons.cli.Options;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
* The {@link ScriptConfiguration} for the {@link org.dspace.app.bulkedit.MetadataImportCLI} CLI script
*/
public class MetadataImportCliScriptConfiguration extends MetadataImportScriptConfiguration<MetadataImportCLI> {
@Override
public Options getOptions() {
Options options = super.getOptions();
options.addOption("e", "email", true, "email address or user id of user (required if adding new items)");
options.getOption("e").setRequired(true);
super.options = options;
return options;
}
}

View File

@@ -12,23 +12,26 @@ package org.dspace.app.bulkedit;
[removed]
 *
 * @author Stuart Lewis
 */
public class MetadataImportException extends Exception {
    /**
     * Instantiate a new MetadataImportException
     *
     * @param message the error message
     */
    public MetadataImportException(String message) {
        super(message);
    }

    /**
     * Instantiate a new MetadataImportException
     *
     * @param message the error message
     * @param exception the root cause
     */
    public MetadataImportException(String message, Exception exception) {
        super(message, exception);
    }
}

[added]
 *
 * @author Stuart Lewis
 */
public class MetadataImportException extends Exception
{
    /**
     * Instantiate a new MetadataImportException
     *
     * @param message the error message
     */
    public MetadataImportException(String message)
    {
        super(message);
    }

    /**
     * Instantiate a new MetadataImportException
     *
     * @param message the error message
     * @param exception the root cause
     */
    public MetadataImportException(String message, Exception exception)
    {
        super(message, exception);
    }
}

View File

@@ -12,63 +12,41 @@ package org.dspace.app.bulkedit;
[removed]
 *
 * @author Stuart Lewis
 */
public class MetadataImportInvalidHeadingException extends Exception {
    /**
     * The type of error (schema or element)
     */
    private int type;

    /**
     * The bad heading
     */
    private String badHeading;

    /**
     * The column number
     */
    private int column;

    /**
     * Error with the schema
     */
    public static final int SCHEMA = 0;

    /**
     * Error with the element
     */
    public static final int ELEMENT = 1;

    /**
     * Error with a missing header
     */
    public static final int MISSING = 98;

    /**
     * Error with the whole entry
     */
    public static final int ENTRY = 99;

    /**
     * Instantiate a new MetadataImportInvalidHeadingException
     *
     * @param message the error message
     * @param theType the type of the error
     * @param theColumn column number
     */
    public MetadataImportInvalidHeadingException(String message, int theType, int theColumn) {
        super(message);
        badHeading = message;
        type = theType;
        column = theColumn;
    }

    /**
     * Get the type of the exception
     *
     * @return the type of the exception
     */
    public String getType() {
        return "" + type;
    }

[added]
 *
 * @author Stuart Lewis
 */
public class MetadataImportInvalidHeadingException extends Exception
{
    /** The type of error (schema or element) */
    private int type;

    /** The bad heading */
    private String badHeading;

    /** Error with the schema */
    public static final int SCHEMA = 0;

    /** Error with the element */
    public static final int ELEMENT = 1;

    /**
     * Instantiate a new MetadataImportInvalidHeadingException
     *
     * @param message the error message
     * @param theType the type of the error
     */
    public MetadataImportInvalidHeadingException(String message, int theType)
    {
        super(message);
        badHeading = message;
        type = theType;
    }

    /**
     * Get the type of the exception
     *
     * @return the type of the exception
     */
    public String getType()
    {
        return "" + type;
    }
@@ -77,34 +55,25 @@ public class MetadataImportInvalidHeadingException extends Exception {
[removed]
     *
     * @return the invalid heading
     */
    public String getBadHeader() {
        return badHeading;
    }

    /**
     * Get the column number that was invalid
     *
     * @return the invalid column number
     */
    public int getColumn() {
        return column;
    }

    /**
     * Get the exception message
     *
     * @return The exception message
     */
    @Override
    public String getMessage() {
        if (type == SCHEMA) {
            return "Unknown metadata schema in column " + column + ": " + badHeading;
        } else if (type == ELEMENT) {
            return "Unknown metadata element in column " + column + ": " + badHeading;
        } else if (type == MISSING) {
            return "Row with missing header: column " + column;
        } else {
            return "Bad metadata declaration in column " + column + ": " + badHeading;
        }
    }
}

[added]
     *
     * @return the invalid heading
     */
    public String getBadHeader()
    {
        return badHeading;
    }

    /**
     * Get the exception message
     *
     * @return The exception message
     */
    public String getMessage()
    {
        if (type == SCHEMA)
        {
            return "Unknown metadata schema in heading: " + badHeading;
        }
        else
        {
            return "Unknown metadata element in heading: " + badHeading;
        }
    }
}

View File

@@ -1,59 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import java.io.InputStream;
import org.apache.commons.cli.Options;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
* The {@link ScriptConfiguration} for the {@link MetadataImport} script
*/
public class MetadataImportScriptConfiguration<T extends MetadataImport> extends ScriptConfiguration<T> {
private Class<T> dspaceRunnableClass;
@Override
public Class<T> getDspaceRunnableClass() {
return dspaceRunnableClass;
}
/**
* Generic setter for the dspaceRunnableClass
* @param dspaceRunnableClass The dspaceRunnableClass to be set on this MetadataImportScriptConfiguration
*/
@Override
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
this.dspaceRunnableClass = dspaceRunnableClass;
}
@Override
public Options getOptions() {
if (options == null) {
Options options = new Options();
options.addOption("f", "file", true, "source file");
options.getOption("f").setType(InputStream.class);
options.getOption("f").setRequired(true);
options.addOption("s", "silent", false,
"silent operation - doesn't request confirmation of changes USE WITH CAUTION");
options.addOption("w", "workflow", false, "workflow - when adding new items, use collection workflow");
options.addOption("n", "notify", false,
"notify - when adding new items using a workflow, send notification emails");
options.addOption("v", "validate-only", false,
"validate - just validate the csv, don't run the import");
options.addOption("t", "template", false,
"template - when adding new items, use the collection template (if it exists)");
options.addOption("h", "help", false, "help");
super.options = options;
}
return options;
}
}

View File

@@ -1,21 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/**
* <p>The DSpace Batch Metadata Editor, which uses a CSV file to export/import
* item metadata.</p>
* <ul>
* <li>works on items, communities, collections or the whole site</li>
* <li>can also create new items, delete items and withdraw/restore them</li>
* <li>cannot export/import bitstreams</li>
* </ul>
*/
package org.dspace.app.bulkedit;
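For illustration, a metadata CSV of the kind this package exchanges might look as follows. The UUID, the handle, and the "||" multi-value separator are typical defaults shown here as assumptions, and the "+" in the id column marks a row that creates a new item, per the batch editor's convention:

id,collection,dc.title,dc.contributor.author
e8a42b17-2e3f-4a5b-8c7d-1a2b3c4d5e6f,123456789/2,"A Sample Title","Smith, John||Jones, Anna"
+,123456789/2,"A New Item","Doe, Jane"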

Some files were not shown because too many files have changed in this diff.