Mirror of https://github.com/DSpace/DSpace.git (synced 2025-10-17 06:53:09 +00:00)

Comparing dspace-7.4...rest-tutor — 1 commit (cfc200f2c7)
.codecov.yml — deleted (35 lines)
@@ -1,35 +0,0 @@
# DSpace configuration for Codecov.io coverage reports
# These override the default YAML settings at
# https://docs.codecov.io/docs/codecov-yaml#section-default-yaml
# Can be validated via instructions at:
# https://docs.codecov.io/docs/codecov-yaml#validate-your-repository-yaml

# Tell Codecov not to send a coverage notification until (at least) 2 builds are completed
# Since we run Unit & Integration tests in parallel, this lets Codecov know that coverage
# needs to be merged across those builds
codecov:
  notify:
    after_n_builds: 2

# Settings related to code coverage analysis
coverage:
  status:
    # Configuration for project-level checks. This checks how the PR changes overall coverage.
    project:
      default:
        # For each PR, auto compare coverage to previous commit.
        # Require that overall (project) coverage does NOT drop more than 0.5%
        target: auto
        threshold: 0.5%
    # Configuration for patch-level checks. This checks the relative coverage of the new PR code ONLY.
    patch:
      default:
        # Enable informational mode, which just provides info to reviewers & always passes
        # https://docs.codecov.io/docs/commit-status#section-informational
        informational: true

# Turn PR comments "off". This feature adds the code coverage summary as a
# comment on each PR. See https://docs.codecov.io/docs/pull-request-comments
# However, this same info is available from the Codecov checks in the PR's
# "Checks" tab in GitHub. So, the comment is unnecessary.
comment: false
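The header comments point to Codecov's validation instructions. A minimal pre-commit check from a shell, assuming the https://codecov.io/validate endpoint described in those linked docs is still available:

    # POST the YAML to Codecov's validator; it echoes the parsed config or an error
    curl --data-binary @.codecov.yml https://codecov.io/validate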
.dockerignore — deleted (11 lines)
@@ -1,11 +0,0 @@
.git/
.idea/
.settings/
*/target/
dspace/modules/*/target/
Dockerfile.*
dspace/src/main/docker/dspace-postgres-pgcrypto
dspace/src/main/docker/dspace-postgres-pgcrypto-curl
dspace/src/main/docker/solr
dspace/src/main/docker/README.md
dspace/src/main/docker-compose/
.gitattributes (vendored) — 6 lines changed
@@ -1,12 +1,6 @@
# Auto detect text files and perform LF normalization
* text=auto

# Ensure Unix files always keep Unix line endings
*.sh text eol=lf

# Ensure Windows files always keep Windows line endings
*.bat text eol=crlf

# Standard to msysgit
*.doc diff=astextplain
*.DOC diff=astextplain
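How these rules resolve for a given file can be spot-checked with git's own attribute query; a quick sketch (the file paths are illustrative):

    # Ask git which 'text' and 'eol' attributes apply to each path
    git check-attr text eol -- scripts/build.sh scripts/build.bat
    # expected: the .sh path resolves to eol: lf, the .bat path to eol: crlf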
.github/ISSUE_TEMPLATE/bug_report.md (vendored) — deleted (22 lines)
@@ -1,22 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug, needs triage
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is. Include the version(s) of DSpace where you've seen this problem. Link to examples if they are public.

**To Reproduce**
Steps to reproduce the behavior:
1. Do this
2. Then this...

**Expected behavior**
A clear and concise description of what you expected to happen.

**Related work**
Link to any related tickets or PRs here.
.github/ISSUE_TEMPLATE/feature_request.md (vendored) — deleted (20 lines)
@@ -1,20 +0,0 @@
---
name: Feature request
about: Suggest a new feature for this project
title: ''
labels: new feature, needs triage
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives or workarounds you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
@@ -1,26 +0,0 @@
# This workflow runs whenever a new pull request is created
# TEMPORARILY DISABLED. Unfortunately this doesn't work for PRs created from forked repositories (which is how we tend to create PRs).
# There is no known workaround yet. See https://github.community/t/how-to-use-github-token-for-prs-from-forks/16818
name: Pull Request opened

# Only run for newly opened PRs against the "main" branch
on:
  pull_request:
    types: [opened]
    branches:
      - main

jobs:
  automation:
    runs-on: ubuntu-latest
    steps:
      # Assign the PR to whoever created it. This is useful for visualizing assignments on project boards
      # See https://github.com/marketplace/actions/pull-request-assigner
      - name: Assign PR to creator
        uses: thomaseizinger/assign-pr-creator-action@v1.0.0
        # Note, this authentication token is created automatically
        # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
        # Ignore errors. It is possible the PR was created by someone who cannot be assigned
        continue-on-error: true
.github/pull_request_template.md (vendored) — deleted (26 lines)
@@ -1,26 +0,0 @@
## References
_Add references/links to any related issues or PRs. These may include:_
* Fixes #[issue-number]
* Related to [REST Contract](https://github.com/DSpace/Rest7Contract)

## Description
Short summary of changes (1-2 sentences).

## Instructions for Reviewers
Please add a more detailed description of the changes made by your PR. At a minimum, providing a bulleted list of changes in your PR is helpful to reviewers.

List of changes in this PR:
* First, ...
* Second, ...

**Include guidance for how to test or review your PR.** This may include: steps to reproduce a bug, screenshots or description of a new feature, or reasons behind specific changes.

## Checklist
_This checklist provides a reminder of what we are going to look for when reviewing your PR. You need not complete this checklist prior to creating your PR (draft PRs are always welcome). If you are unsure about an item in the checklist, don't hesitate to ask. We're here to help!_

- [ ] My PR is small in size (e.g. less than 1,000 lines of code, not including comments & integration tests). Exceptions may be made if previously agreed upon.
- [ ] My PR passes Checkstyle validation based on the [Code Style Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Style+Guide).
- [ ] My PR includes Javadoc for _all new (or modified) public methods and classes_. It also includes Javadoc for large or complex private methods.
- [ ] My PR passes all tests and includes new/updated Unit or Integration Tests based on the [Code Testing Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Testing+Guide).
- [ ] If my PR includes new, third-party dependencies (in any `pom.xml`), I've made sure their licenses align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation.
- [ ] If my PR modifies the REST API, I've linked to the REST Contract page (or open PR) related to this change.
.github/workflows/build.yml (vendored) — deleted (81 lines)
@@ -1,81 +0,0 @@
# DSpace Continuous Integration/Build via GitHub Actions
# Concepts borrowed from
# https://docs.github.com/en/free-pro-team@latest/actions/guides/building-and-testing-java-with-maven
name: Build

# Run this Build for all pushes / PRs to current branch
on: [push, pull_request]

jobs:
  tests:
    runs-on: ubuntu-latest
    env:
      # Give Maven 1GB of memory to work with
      MAVEN_OPTS: "-Xmx1024M"
    strategy:
      # Create a matrix of two separate configurations for Unit vs Integration Tests
      # This will ensure those tasks are run in parallel
      # Also specify version of Java to use (this can allow us to optionally run tests on multiple JDKs in future)
      matrix:
        include:
          # NOTE: Unit Tests include deprecated REST API v6 (as it has unit tests)
          # - surefire.rerunFailingTestsCount => try again for flakey tests, and keep track of/report on number of retries
          - type: "Unit Tests"
            java: 11
            mvnflags: "-DskipUnitTests=false -Pdspace-rest -Dsurefire.rerunFailingTestsCount=2"
            resultsdir: "**/target/surefire-reports/**"
          # NOTE: ITs skip all code validation checks, as they are already done by Unit Test job.
          # - enforcer.skip => Skip maven-enforcer-plugin rules
          # - checkstyle.skip => Skip all checkstyle checks by maven-checkstyle-plugin
          # - license.skip => Skip all license header checks by license-maven-plugin
          # - xml.skip => Skip all XML/XSLT validation by xml-maven-plugin
          # - failsafe.rerunFailingTestsCount => try again for flakey tests, and keep track of/report on number of retries
          - type: "Integration Tests"
            java: 11
            mvnflags: "-DskipIntegrationTests=false -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true -Dfailsafe.rerunFailingTestsCount=2"
            resultsdir: "**/target/failsafe-reports/**"
      # Do NOT exit immediately if one matrix job fails
      # This ensures ITs continue running even if Unit Tests fail, or vice versa
      fail-fast: false
    name: Run ${{ matrix.type }}
    # These are the actual CI steps to perform per job
    steps:
      # https://github.com/actions/checkout
      - name: Checkout codebase
        uses: actions/checkout@v2

      # https://github.com/actions/setup-java
      - name: Install JDK ${{ matrix.java }}
        uses: actions/setup-java@v2
        with:
          java-version: ${{ matrix.java }}
          distribution: 'temurin'

      # https://github.com/actions/cache
      - name: Cache Maven dependencies
        uses: actions/cache@v2
        with:
          # Cache entire ~/.m2/repository
          path: ~/.m2/repository
          # Cache key is hash of all pom.xml files. Therefore any changes to POMs will invalidate cache
          key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
          restore-keys: ${{ runner.os }}-maven-

      # Run parallel Maven builds based on the above 'strategy.matrix'
      - name: Run Maven ${{ matrix.type }}
        env:
          TEST_FLAGS: ${{ matrix.mvnflags }}
        run: mvn --no-transfer-progress -V install -P-assembly -Pcoverage-report $TEST_FLAGS

      # If previous step failed, save results of tests to downloadable artifact for this job
      # (This artifact is downloadable at the bottom of any job's summary page)
      - name: Upload Results of ${{ matrix.type }} to Artifact
        if: ${{ failure() }}
        uses: actions/upload-artifact@v2
        with:
          name: ${{ matrix.type }} results
          path: ${{ matrix.resultsdir }}

      # https://github.com/codecov/codecov-action
      - name: Upload coverage to Codecov.io
        uses: codecov/codecov-action@v2
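Either matrix job can be reproduced locally by combining the shared Maven invocation with that job's mvnflags. A sketch for the "Unit Tests" job, using only flags already defined in the workflow above:

    # Same call as the 'Run Maven' step, with the Unit Tests matrix flags inlined
    MAVEN_OPTS="-Xmx1024M" mvn --no-transfer-progress -V install -P-assembly -Pcoverage-report \
        -DskipUnitTests=false -Pdspace-rest -Dsurefire.rerunFailingTestsCount=2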
.github/workflows/docker.yml (vendored) — deleted (169 lines)
@@ -1,169 +0,0 @@
# DSpace Docker image build for hub.docker.com
name: Docker images

# Run this Build for all pushes to 'main' or maintenance branches, or tagged releases.
# Also run for PRs to ensure PR doesn't break Docker build process
on:
  push:
    branches:
      - main
      - 'dspace-**'
    tags:
      - 'dspace-**'
  pull_request:

jobs:
  docker:
    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
    if: github.repository == 'dspace/dspace'
    runs-on: ubuntu-latest
    env:
      # Define tags to use for Docker images based on Git tags/branches (for docker/metadata-action)
      # For a new commit on default branch (main), use the literal tag 'dspace-7_x' on Docker image.
      # For a new commit on other branches, use the branch name as the tag for Docker image.
      # For a new tag, copy that tag name as the tag for Docker image.
      IMAGE_TAGS: |
        type=raw,value=dspace-7_x,enable=${{ endsWith(github.ref, github.event.repository.default_branch) }}
        type=ref,event=branch,enable=${{ !endsWith(github.ref, github.event.repository.default_branch) }}
        type=ref,event=tag
      # Define default tag "flavor" for docker/metadata-action per
      # https://github.com/docker/metadata-action#flavor-input
      # We turn off 'latest' tag by default.
      TAGS_FLAVOR: |
        latest=false
      # Architectures / Platforms for which we will build Docker images
      # If this is a PR, we ONLY build for AMD64. For PRs we only do a sanity check test to ensure Docker builds work.
      # If this is NOT a PR (e.g. a tag or merge commit), also build for ARM64. NOTE: The ARM64 build takes MUCH
      # longer (around 45mins or so) which is why we only run it when pushing a new Docker image.
      PLATFORMS: linux/amd64${{ github.event_name != 'pull_request' && ', linux/arm64' || '' }}

    steps:
      # https://github.com/actions/checkout
      - name: Checkout codebase
        uses: actions/checkout@v2

      # https://github.com/docker/setup-buildx-action
      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@v1

      # https://github.com/docker/setup-qemu-action
      - name: Set up QEMU emulation to build for multiple architectures
        uses: docker/setup-qemu-action@v2

      # https://github.com/docker/login-action
      - name: Login to DockerHub
        # Only login if not a PR, as PRs only trigger a Docker build and not a push
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}

      ####################################################
      # Build/Push the 'dspace/dspace-dependencies' image
      ####################################################
      # https://github.com/docker/metadata-action
      # Get Metadata for docker_build_deps step below
      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-dependencies' image
        id: meta_build_deps
        uses: docker/metadata-action@v3
        with:
          images: dspace/dspace-dependencies
          tags: ${{ env.IMAGE_TAGS }}
          flavor: ${{ env.TAGS_FLAVOR }}

      # https://github.com/docker/build-push-action
      - name: Build and push 'dspace-dependencies' image
        id: docker_build_deps
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./Dockerfile.dependencies
          platforms: ${{ env.PLATFORMS }}
          # For pull requests, we run the Docker build (to ensure no PR changes break the build),
          # but we ONLY do an image push to DockerHub if it's NOT a PR
          push: ${{ github.event_name != 'pull_request' }}
          # Use tags / labels provided by 'docker/metadata-action' above
          tags: ${{ steps.meta_build_deps.outputs.tags }}
          labels: ${{ steps.meta_build_deps.outputs.labels }}

      #######################################
      # Build/Push the 'dspace/dspace' image
      #######################################
      # Get Metadata for docker_build step below
      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace' image
        id: meta_build
        uses: docker/metadata-action@v3
        with:
          images: dspace/dspace
          tags: ${{ env.IMAGE_TAGS }}
          flavor: ${{ env.TAGS_FLAVOR }}

      - name: Build and push 'dspace' image
        id: docker_build
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./Dockerfile
          platforms: ${{ env.PLATFORMS }}
          # For pull requests, we run the Docker build (to ensure no PR changes break the build),
          # but we ONLY do an image push to DockerHub if it's NOT a PR
          push: ${{ github.event_name != 'pull_request' }}
          # Use tags / labels provided by 'docker/metadata-action' above
          tags: ${{ steps.meta_build.outputs.tags }}
          labels: ${{ steps.meta_build.outputs.labels }}

      #####################################################
      # Build/Push the 'dspace/dspace' image ('-test' tag)
      #####################################################
      # Get Metadata for docker_build_test step below
      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-test' image
        id: meta_build_test
        uses: docker/metadata-action@v3
        with:
          images: dspace/dspace
          tags: ${{ env.IMAGE_TAGS }}
          # As this is a test/development image, its tags are all suffixed with "-test". Otherwise, it uses the same
          # tagging logic as the primary 'dspace/dspace' image above.
          flavor: ${{ env.TAGS_FLAVOR }}
            suffix=-test

      - name: Build and push 'dspace-test' image
        id: docker_build_test
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./Dockerfile.test
          platforms: ${{ env.PLATFORMS }}
          # For pull requests, we run the Docker build (to ensure no PR changes break the build),
          # but we ONLY do an image push to DockerHub if it's NOT a PR
          push: ${{ github.event_name != 'pull_request' }}
          # Use tags / labels provided by 'docker/metadata-action' above
          tags: ${{ steps.meta_build_test.outputs.tags }}
          labels: ${{ steps.meta_build_test.outputs.labels }}

      ###########################################
      # Build/Push the 'dspace/dspace-cli' image
      ###########################################
      # Get Metadata for docker_build_cli step below
      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-cli' image
        id: meta_build_cli
        uses: docker/metadata-action@v3
        with:
          images: dspace/dspace-cli
          tags: ${{ env.IMAGE_TAGS }}
          flavor: ${{ env.TAGS_FLAVOR }}

      - name: Build and push 'dspace-cli' image
        id: docker_build_cli
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./Dockerfile.cli
          platforms: ${{ env.PLATFORMS }}
          # For pull requests, we run the Docker build (to ensure no PR changes break the build),
          # but we ONLY do an image push to DockerHub if it's NOT a PR
          push: ${{ github.event_name != 'pull_request' }}
          # Use tags / labels provided by 'docker/metadata-action' above
          tags: ${{ steps.meta_build_cli.outputs.tags }}
          labels: ${{ steps.meta_build_cli.outputs.labels }}
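The PLATFORMS value uses GitHub's `cond && a || b` expression idiom, so it evaluates to "linux/amd64" on pull requests and "linux/amd64, linux/arm64" otherwise. The PR-only sanity build can be approximated locally with buildx; a sketch (the "local" tags are illustrative, but the dependencies image must be tagged dspace-7_x, since ./Dockerfile pulls FROM that exact tag):

    # Single-arch builds, mirroring what this workflow does for PRs (no push)
    docker buildx build -f ./Dockerfile.dependencies --platform linux/amd64 -t dspace/dspace-dependencies:dspace-7_x .
    docker buildx build -f ./Dockerfile --platform linux/amd64 -t dspace/dspace:local .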
.github/workflows/issue_opened.yml (vendored) — deleted (29 lines)
@@ -1,29 +0,0 @@
# This workflow runs whenever a new issue is created
name: Issue opened

on:
  issues:
    types: [opened]

jobs:
  automation:
    runs-on: ubuntu-latest
    steps:
      # Add the new issue to a project board, if it needs triage
      # See https://github.com/marketplace/actions/create-project-card-action
      - name: Add issue to project board
        # Only add to project board if issue is flagged as "needs triage" or has no labels
        # NOTE: By default we flag new issues as "needs triage" in our issue template
        if: (contains(github.event.issue.labels.*.name, 'needs triage') || join(github.event.issue.labels.*.name) == '')
        uses: technote-space/create-project-card-action@v1
        # Note, the authentication token below is an ORG level Secret.
        # It must be created/recreated manually via a personal access token with "public_repo" and "admin:org" permissions
        # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token#permissions-for-the-github_token
        # This is necessary because the "DSpace Backlog" project is an org level project (i.e. not repo specific)
        with:
          GITHUB_TOKEN: ${{ secrets.ORG_PROJECT_TOKEN }}
          PROJECT: DSpace Backlog
          COLUMN: Triage
          CHECK_ORG_PROJECT: true
        # Ignore errors.
        continue-on-error: true
.github/workflows/label_merge_conflicts.yml (vendored) — deleted (25 lines)
@@ -1,25 +0,0 @@
# This workflow checks open PRs for merge conflicts and labels them when conflicts are found
name: Check for merge conflicts

# Run whenever the "main" branch is updated
# NOTE: This means merge conflicts are only checked for when a PR is merged to main.
on:
  push:
    branches:
      - main

jobs:
  triage:
    runs-on: ubuntu-latest
    steps:
      # See: https://github.com/mschilde/auto-label-merge-conflicts/
      - name: Auto-label PRs with merge conflicts
        uses: mschilde/auto-label-merge-conflicts@v2.0
        # Add "merge conflict" label if a merge conflict is detected. Remove it when resolved.
        # Note, the authentication token is created automatically
        # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token
        with:
          CONFLICT_LABEL_NAME: 'merge conflict'
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        # Ignore errors
        continue-on-error: true
.gitignore (vendored) — 6 lines changed
@@ -19,7 +19,7 @@ tags
overlays/

## Ignore project files created by NetBeans
nbproject/
nbproject/private/
build/
nbbuild/
dist/

@@ -42,7 +42,3 @@ nb-configuration.xml

##Ignore JRebel project configuration
rebel.xml

## Ignore jenv configuration
.java-version
.lgtm.yml — deleted (9 lines)
@@ -1,9 +0,0 @@
# LGTM Settings (https://lgtm.com/)
# For reference, see https://lgtm.com/help/lgtm/lgtm.yml-configuration-file
# or template at https://lgtm.com/static/downloads/lgtm.template.yml

extraction:
  java:
    index:
      # Specify the Java version required to build the project
      java_version: 11
.travis.yml — new file (44 lines)
@@ -0,0 +1,44 @@
language: java
sudo: false

env:
  # Give Maven 1GB of memory to work with
  - MAVEN_OPTS=-Xmx1024M

jdk:
  # DS-3384 Oracle JDK 8 has DocLint enabled by default.
  # Let's use this to catch any newly introduced DocLint issues.
  - oraclejdk8

## Should we run into any problems with oraclejdk8 on Travis, we may try the following workaround.
## https://docs.travis-ci.com/user/languages/java#Testing-Against-Multiple-JDKs
## https://github.com/travis-ci/travis-ci/issues/3259#issuecomment-130860338
#addons:
#  apt:
#    packages:
#    - oracle-java8-installer

# Install prerequisites for building Mirage2 more rapidly
before_install:
  # Remove outdated settings.xml from Travis builds. Workaround for https://github.com/travis-ci/travis-ci/issues/4629
  - rm ~/.m2/settings.xml

# Skip install stage, as we'll do it below
install: "echo 'Skipping install stage, dependencies will be downloaded during build and test stages.'"

# Two stage Build and Test
# 1. Install & Unit Test APIs
# 2. Assemble DSpace
script:
  # 1. [Install & Unit Test] Check source code licenses and run source code Unit Tests
  #    license:check => Validate all source code license headers
  #    -Dmaven.test.skip=false => Enable DSpace Unit Tests
  #    -DskipITs=false => Enable DSpace Integration Tests
  #    -P !assembly => Skip normal assembly (as it can be memory intensive)
  #    -B => Maven batch/non-interactive mode (recommended for CI)
  #    -V => Display Maven version info before build
  #    -Dsurefire.rerunFailingTestsCount=2 => try again for flakey tests, and keep track of/report on number of retries
  - "mvn clean install license:check -Dmaven.test.skip=false -DskipITs=false -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
  # 2. [Assemble DSpace] Ensure overlay & assembly process works (from [src]/dspace/)
  #    -P !assembly => SKIP the actual building of [src]/dspace/dspace-installer (as it can be memory intensive)
  - "cd dspace && mvn package -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
Dockerfile — deleted (67 lines)
@@ -1,67 +0,0 @@
# This image will be published as dspace/dspace
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
#
# - note: default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x

# This Dockerfile uses JDK11 by default, but has also been tested with JDK17.
# To build with JDK17, use "--build-arg JDK_VERSION=17"
ARG JDK_VERSION=11

# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-installer directory will be written to /install
RUN mkdir /install \
    && chown -Rv dspace: /install \
    && chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
# Build DSpace (note: this build doesn't include the optional, deprecated "dspace-rest" webapp)
# Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small
RUN mvn --no-transfer-progress package && \
    mv /app/dspace/target/${TARGET_DIR}/* /install && \
    mvn clean

# Step 2 - Run Ant Deploy
FROM openjdk:${JDK_VERSION}-slim as ant_build
ARG TARGET_DIR=dspace-installer
# COPY the /install directory from 'build' container to /dspace-src in this container
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.12
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
# Need wget to install ant
RUN apt-get update \
    && apt-get install -y --no-install-recommends wget \
    && apt-get purge -y --auto-remove \
    && rm -rf /var/lib/apt/lists/*
# Download and install 'ant'
RUN mkdir $ANT_HOME && \
    wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
# Run necessary 'ant' deploy scripts
RUN ant init_installation update_configs update_code update_webapps

# Step 3 - Run tomcat
# Create a new tomcat image that does not retain the build directory contents
FROM tomcat:9-jdk${JDK_VERSION}
# NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration.
ENV DSPACE_INSTALL=/dspace
# Copy the /dspace directory from 'ant_build' container to /dspace in this container
COPY --from=ant_build /dspace $DSPACE_INSTALL
# Expose Tomcat port and AJP port
EXPOSE 8080 8009
# Give java extra memory (2GB)
ENV JAVA_OPTS=-Xmx2000m

# Link the DSpace 'server' webapp into Tomcat's webapps directory.
# This ensures that when we start Tomcat, it runs from /server path (e.g. http://localhost:8080/server/)
RUN ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/server
# If you wish to run "server" webapp off the ROOT path, then comment out the above RUN, and uncomment the below RUN.
# You also MUST update the 'dspace.server.url' configuration to match.
# Please note that server webapp should only run on one path at a time.
#RUN mv /usr/local/tomcat/webapps/ROOT /usr/local/tomcat/webapps/ROOT.bk && \
#    ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/ROOT
Dockerfile.cli — deleted (54 lines)
@@ -1,54 +0,0 @@
# This image will be published as dspace/dspace-cli
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
#
# - note: default tag for branch: dspace/dspace-cli: dspace/dspace-cli:dspace-7_x

# This Dockerfile uses JDK11 by default, but has also been tested with JDK17.
# To build with JDK17, use "--build-arg JDK_VERSION=17"
ARG JDK_VERSION=11

# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-installer directory will be written to /install
RUN mkdir /install \
    && chown -Rv dspace: /install \
    && chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
# Build DSpace. Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small
RUN mvn --no-transfer-progress package && \
    mv /app/dspace/target/${TARGET_DIR}/* /install && \
    mvn clean

# Step 2 - Run Ant Deploy
FROM openjdk:${JDK_VERSION}-slim as ant_build
ARG TARGET_DIR=dspace-installer
# COPY the /install directory from 'build' container to /dspace-src in this container
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.12
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
# Need wget to install ant
RUN apt-get update \
    && apt-get install -y --no-install-recommends wget \
    && apt-get purge -y --auto-remove \
    && rm -rf /var/lib/apt/lists/*
# Download and install 'ant'
RUN mkdir $ANT_HOME && \
    wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
# Run necessary 'ant' deploy scripts
RUN ant init_installation update_configs update_code

# Step 3 - Run jdk
FROM openjdk:${JDK_VERSION}
# NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration.
ENV DSPACE_INSTALL=/dspace
# Copy the /dspace directory from 'ant_build' container to /dspace in this container
COPY --from=ant_build /dspace $DSPACE_INSTALL
# Give java extra memory (1GB)
ENV JAVA_OPTS=-Xmx1000m
Dockerfile.dependencies — deleted (36 lines)
@@ -1,36 +0,0 @@
# This image will be published as dspace/dspace-dependencies
# The purpose of this image is to make the build for dspace/dspace run faster
#

# This Dockerfile uses JDK11 by default, but has also been tested with JDK17.
# To build with JDK17, use "--build-arg JDK_VERSION=17"
ARG JDK_VERSION=11

# Step 1 - Run Maven Build
FROM maven:3-openjdk-${JDK_VERSION}-slim as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# Create the 'dspace' user account & home directory
RUN useradd dspace \
    && mkdir -p /home/dspace \
    && chown -Rv dspace: /home/dspace
RUN chown -Rv dspace: /app
# Need git to support buildnumber-maven-plugin, which lets us know what version of DSpace is being run.
RUN apt-get update \
    && apt-get install -y --no-install-recommends git \
    && apt-get purge -y --auto-remove \
    && rm -rf /var/lib/apt/lists/*

# Switch to dspace user & run below commands as that user
USER dspace

# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/

# Trigger the installation of all maven dependencies (hide download progress messages)
RUN mvn --no-transfer-progress package

# Clear the contents of the /app directory (including all maven builds), so no artifacts remain.
# This ensures when dspace:dspace is built, it will use the Maven local cache (~/.m2) for dependencies
USER root
RUN rm -rf /app/*
Dockerfile.test — deleted (80 lines)
@@ -1,80 +0,0 @@
# This image will be published as dspace/dspace
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
#
# - note: default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x-test
#
# This image is meant for TESTING/DEVELOPMENT ONLY as it deploys the old v6 REST API under HTTP (not HTTPS)

# This Dockerfile uses JDK11 by default, but has also been tested with JDK17.
# To build with JDK17, use "--build-arg JDK_VERSION=17"
ARG JDK_VERSION=11

# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-installer directory will be written to /install
RUN mkdir /install \
    && chown -Rv dspace: /install \
    && chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
# Build DSpace (INCLUDING the optional, deprecated "dspace-rest" webapp)
# Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small
RUN mvn --no-transfer-progress package -Pdspace-rest && \
    mv /app/dspace/target/${TARGET_DIR}/* /install && \
    mvn clean

# Step 2 - Run Ant Deploy
FROM openjdk:${JDK_VERSION}-slim as ant_build
ARG TARGET_DIR=dspace-installer
# COPY the /install directory from 'build' container to /dspace-src in this container
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.12
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
# Need wget to install ant
RUN apt-get update \
    && apt-get install -y --no-install-recommends wget \
    && apt-get purge -y --auto-remove \
    && rm -rf /var/lib/apt/lists/*
# Download and install 'ant'
RUN mkdir $ANT_HOME && \
    wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
# Run necessary 'ant' deploy scripts
RUN ant init_installation update_configs update_code update_webapps

# Step 3 - Run tomcat
# Create a new tomcat image that does not retain the build directory contents
FROM tomcat:9-jdk${JDK_VERSION}
ENV DSPACE_INSTALL=/dspace
ENV TOMCAT_INSTALL=/usr/local/tomcat
# Copy the /dspace directory from 'ant_build' container to /dspace in this container
COPY --from=ant_build /dspace $DSPACE_INSTALL
# Enable the AJP connector in Tomcat's server.xml
# NOTE: secretRequired="false" should only be used when AJP is NOT accessible from an external network. But, secretRequired="true" isn't supported by mod_proxy_ajp until Apache 2.5
RUN sed -i '/Service name="Catalina".*/a \\n    <Connector protocol="AJP/1.3" port="8009" address="0.0.0.0" redirectPort="8443" URIEncoding="UTF-8" secretRequired="false" />' $TOMCAT_INSTALL/conf/server.xml
# Expose Tomcat port and AJP port
EXPOSE 8080 8009
# Give java extra memory (2GB)
ENV JAVA_OPTS=-Xmx2000m

# Link the DSpace 'server' webapp into Tomcat's webapps directory.
# This ensures that when we start Tomcat, it runs from /server path (e.g. http://localhost:8080/server/)
# Also link the v6.x (deprecated) REST API off the "/rest" path
RUN ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/server && \
    ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest
# If you wish to run "server" webapp off the ROOT path, then comment out the above RUN, and uncomment the below RUN.
# You also MUST update the 'dspace.server.url' configuration to match.
# Please note that server webapp should only run on one path at a time.
#RUN mv /usr/local/tomcat/webapps/ROOT /usr/local/tomcat/webapps/ROOT.bk && \
#    ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/ROOT && \
#    ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest

# Overwrite the v6.x (deprecated) REST API's web.xml, so that we can run it on HTTP (defaults to requiring HTTPS)
# WARNING: THIS IS OBVIOUSLY INSECURE. NEVER DO THIS IN PRODUCTION.
COPY dspace/src/main/docker/test/rest_web.xml $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml
RUN sed -i -e "s|\${dspace.dir}|$DSPACE_INSTALL|" $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml
LICENSE — 23 lines changed
@@ -1,6 +1,7 @@
BSD 3-Clause License
DSpace source code license:

Copyright (c) 2002-2021, LYRASIS. All rights reserved.

Copyright (c) 2002-2016, DuraSpace. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are

@@ -13,12 +14,13 @@ notice, this list of conditions and the following disclaimer.
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.

- Neither the name of the copyright holder nor the names of its
  contributors may be used to endorse or promote products derived from
  this software without specific prior written permission.
- Neither the name DuraSpace nor the name of the DSpace Foundation
  nor the names of its contributors may be used to endorse or promote
  products derived from this software without specific prior written
  permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,

@@ -28,4 +30,11 @@ OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.


DSpace uses third-party libraries which may be distributed under
different licenses to the above. Information about these licenses
is detailed in the LICENSES_THIRD_PARTY file at the root of the source
tree. You must agree to the terms of these licenses, in addition to
the above DSpace source code license, in order to use this software.
@@ -1,633 +1,424 @@
|
||||
|
||||
DSpace uses third-party libraries which may be distributed under different
|
||||
DSpace uses third-party libraries which may be distributed under different
|
||||
licenses. We have listed all of these third party libraries and their licenses
|
||||
below. This file can be regenerated at any time by simply running:
|
||||
|
||||
|
||||
mvn clean verify -Dthird.party.licenses=true
|
||||
|
||||
You must agree to the terms of these licenses, in addition to the DSpace
|
||||
You must agree to the terms of these licenses, in addition to the DSpace
|
||||
source code license, in order to use this software.
|
||||
|
||||
---------------------------------------------------
|
||||
Third party Java libraries listed by License type.
|
||||
|
||||
PLEASE NOTE: Some dependencies may be listed under multiple licenses if they
|
||||
are dual-licensed. This is especially true of anything listed as
|
||||
"GNU General Public Library" below, as DSpace actually does NOT allow for any
|
||||
are dual-licensed. This is especially true of anything listed as
|
||||
"GNU General Public Library" below, as DSpace actually does NOT allow for any
|
||||
dependencies that are solely released under GPL terms. For more info see:
|
||||
https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
---------------------------------------------------
|
||||
|
||||
Apache Software License, Version 2.0:
|
||||
|
||||
* Ant-Contrib Tasks (ant-contrib:ant-contrib:1.0b3 - http://ant-contrib.sourceforge.net)
|
||||
* AWS SDK for Java - Core (com.amazonaws:aws-java-sdk-core:1.12.261 - https://aws.amazon.com/sdkforjava)
|
||||
* AWS Java SDK for AWS KMS (com.amazonaws:aws-java-sdk-kms:1.12.261 - https://aws.amazon.com/sdkforjava)
|
||||
* AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.12.261 - https://aws.amazon.com/sdkforjava)
|
||||
* JMES Path Query library (com.amazonaws:jmespath-java:1.12.261 - https://aws.amazon.com/sdkforjava)
|
||||
* HPPC Collections (com.carrotsearch:hppc:0.8.1 - http://labs.carrotsearch.com/hppc.html/hppc)
|
||||
* com.drewnoakes:metadata-extractor (com.drewnoakes:metadata-extractor:2.16.0 - https://drewnoakes.com/code/exif/)
|
||||
* parso (com.epam:parso:2.0.14 - https://github.com/epam/parso)
|
||||
* Esri Geometry API for Java (com.esri.geometry:esri-geometry-api:2.2.0 - https://github.com/Esri/geometry-api-java)
|
||||
* ClassMate (com.fasterxml:classmate:1.3.0 - http://github.com/cowtowncoder/java-classmate)
|
||||
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.12.6 - http://github.com/FasterXML/jackson)
|
||||
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.12.6 - https://github.com/FasterXML/jackson-core)
|
||||
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.12.6.1 - http://github.com/FasterXML/jackson)
|
||||
* Jackson dataformat: CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.12.6 - http://github.com/FasterXML/jackson-dataformats-binary)
|
||||
* Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.12.3 - http://github.com/FasterXML/jackson-dataformats-binary)
|
||||
* Jackson-dataformat-YAML (com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1 - https://github.com/FasterXML/jackson-dataformats-text)
|
||||
* Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8)
|
||||
* Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.11.1 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310)
|
||||
* Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310)
|
||||
* Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names)
|
||||
* Java UUID Generator (com.fasterxml.uuid:java-uuid-generator:4.0.1 - https://github.com/cowtowncoder/java-uuid-generator)
|
||||
* Woodstox (com.fasterxml.woodstox:woodstox-core:6.2.4 - https://github.com/FasterXML/woodstox)
|
||||
* zjsonpatch (com.flipkart.zjsonpatch:zjsonpatch:0.4.6 - https://github.com/flipkart-incubator/zjsonpatch/)
|
||||
* Caffeine cache (com.github.ben-manes.caffeine:caffeine:2.9.2 - https://github.com/ben-manes/caffeine)
|
||||
* btf (com.github.java-json-tools:btf:1.3 - https://github.com/java-json-tools/btf)
|
||||
* jackson-coreutils (com.github.java-json-tools:jackson-coreutils:2.0 - https://github.com/java-json-tools/jackson-coreutils)
|
||||
* jackson-coreutils-equivalence (com.github.java-json-tools:jackson-coreutils-equivalence:1.0 - https://github.com/java-json-tools/jackson-coreutils)
|
||||
* json-schema-core (com.github.java-json-tools:json-schema-core:1.2.14 - https://github.com/java-json-tools/json-schema-core)
|
||||
* json-schema-validator (com.github.java-json-tools:json-schema-validator:2.2.14 - https://github.com/java-json-tools/json-schema-validator)
|
||||
* msg-simple (com.github.java-json-tools:msg-simple:1.2 - https://github.com/java-json-tools/msg-simple)
|
||||
* uri-template (com.github.java-json-tools:uri-template:0.10 - https://github.com/java-json-tools/uri-template)
|
||||
* JCIP Annotations under Apache License (com.github.stephenc.jcip:jcip-annotations:1.0-1 - http://stephenc.github.com/jcip-annotations)
|
||||
* Google APIs Client Library for Java (com.google.api-client:google-api-client:1.23.0 - https://github.com/google/google-api-java-client/google-api-client)
|
||||
* Google Analytics API v3-rev145-1.23.0 (com.google.apis:google-api-services-analytics:v3-rev145-1.23.0 - http://nexus.sonatype.org/oss-repository-hosting.html/google-api-services-analytics)
|
||||
* Code Generation Library (cglib:cglib:2.2.2 - http://cglib.sourceforge.net/)
|
||||
* AWS SDK for Java - Core (com.amazonaws:aws-java-sdk-core:1.10.50 - https://aws.amazon.com/sdkforjava)
|
||||
* AWS Java SDK for AWS KMS (com.amazonaws:aws-java-sdk-kms:1.10.50 - https://aws.amazon.com/sdkforjava)
|
||||
* AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.10.50 - https://aws.amazon.com/sdkforjava)
|
||||
* HPPC Collections (com.carrotsearch:hppc:0.5.2 - http://labs.carrotsearch.com/hppc.html/hppc)
|
||||
* metadata-extractor (com.drewnoakes:metadata-extractor:2.6.2 - http://code.google.com/p/metadata-extractor/)
|
||||
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.5.4 - http://github.com/FasterXML/jackson)
|
||||
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.7.0 - http://github.com/FasterXML/jackson)
|
||||
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.5.4 - https://github.com/FasterXML/jackson)
|
||||
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.7.0 - https://github.com/FasterXML/jackson-core)
|
||||
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.5.4 - http://github.com/FasterXML/jackson)
|
||||
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.7.0 - http://github.com/FasterXML/jackson)
|
||||
* Jackson-JAXRS-base (com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:2.5.4 - http://wiki.fasterxml.com/JacksonHome/jackson-jaxrs-base)
|
||||
* Jackson-JAXRS-JSON (com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:2.5.4 - http://wiki.fasterxml.com/JacksonHome/jackson-jaxrs-json-provider)
|
||||
* Jackson-module-JAXB-annotations (com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.5.4 - http://wiki.fasterxml.com/JacksonJAXBAnnotations)
|
||||
* Google APIs Client Library for Java (com.google.api-client:google-api-client:1.21.0 - https://github.com/google/google-api-java-client/google-api-client)
|
||||
* Google Analytics API v3-rev123-1.21.0 (com.google.apis:google-api-services-analytics:v3-rev123-1.21.0 - http://nexus.sonatype.org/oss-repository-hosting.html/google-api-services-analytics)
|
||||
* FindBugs-jsr305 (com.google.code.findbugs:jsr305:3.0.1 - http://findbugs.sourceforge.net/)
|
||||
* Gson (com.google.code.gson:gson:2.9.0 - https://github.com/google/gson/gson)
|
||||
* error-prone annotations (com.google.errorprone:error_prone_annotations:2.7.1 - http://nexus.sonatype.org/oss-repository-hosting.html/error_prone_parent/error_prone_annotations)
|
||||
* Guava InternalFutureFailureAccess and InternalFutures (com.google.guava:failureaccess:1.0.1 - https://github.com/google/guava/failureaccess)
|
||||
* Guava: Google Core Libraries for Java (com.google.guava:guava:31.0.1-jre - https://github.com/google/guava)
|
||||
* Gson (com.google.code.gson:gson:2.6.1 - https://github.com/google/gson/gson)
|
||||
* Guava: Google Core Libraries for Java (com.google.guava:guava:14.0.1 - http://code.google.com/p/guava-libraries/guava)
|
||||
* Guava: Google Core Libraries for Java (com.google.guava:guava:19.0 - https://github.com/google/guava/guava)
|
||||
* Guava: Google Core Libraries for Java (JDK5 Backport) (com.google.guava:guava-jdk5:17.0 - http://code.google.com/p/guava-libraries/guava-jdk5)
|
||||
* Guava ListenableFuture only (com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava - https://github.com/google/guava/listenablefuture)
|
||||
* Google HTTP Client Library for Java (com.google.http-client:google-http-client:1.23.0 - https://github.com/google/google-http-java-client/google-http-client)
|
||||
* GSON extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-gson:1.41.7 - https://github.com/googleapis/google-http-java-client/google-http-client-gson)
|
||||
* Jackson 2 extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-jackson2:1.23.0 - https://github.com/google/google-http-java-client/google-http-client-jackson2)
|
||||
* J2ObjC Annotations (com.google.j2objc:j2objc-annotations:1.3 - https://github.com/google/j2objc/)
|
||||
* Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.33.3 - https://github.com/googleapis/google-oauth-java-client/google-oauth-client)
|
||||
* ConcurrentLinkedHashMap (com.googlecode.concurrentlinkedhashmap:concurrentlinkedhashmap-lru:1.4.2 - http://code.google.com/p/concurrentlinkedhashmap)
|
||||
* libphonenumber (com.googlecode.libphonenumber:libphonenumber:8.11.1 - https://github.com/google/libphonenumber/)
|
||||
* Jackcess (com.healthmarketscience.jackcess:jackcess:4.0.1 - https://jackcess.sourceforge.io)
|
||||
* Jackcess Encrypt (com.healthmarketscience.jackcess:jackcess-encrypt:4.0.1 - http://jackcessencrypt.sf.net)
|
||||
* project ':json-path' (com.jayway.jsonpath:json-path:2.6.0 - https://github.com/jayway/JsonPath)
|
||||
* project ':json-path-assert' (com.jayway.jsonpath:json-path-assert:2.6.0 - https://github.com/jayway/JsonPath)
|
||||
* Disruptor Framework (com.lmax:disruptor:3.4.2 - http://lmax-exchange.github.com/disruptor)
|
||||
* Google HTTP Client Library for Java (com.google.http-client:google-http-client:1.21.0 - https://github.com/google/google-http-java-client/google-http-client)
|
||||
* Jackson 2 extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-jackson2:1.21.0 - https://github.com/google/google-http-java-client/google-http-client-jackson2)
|
||||
* Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.21.0 - https://github.com/google/google-oauth-java-client/google-oauth-client)
|
||||
* ConcurrentLinkedHashMap (com.googlecode.concurrentlinkedhashmap:concurrentlinkedhashmap-lru:1.2 - http://code.google.com/p/concurrentlinkedhashmap)
|
||||
* ISO Parser (com.googlecode.mp4parser:isoparser:1.0-RC-1 - http://code.google.com/p/mp4parser/)
|
||||
* builder-commons (com.lyncode:builder-commons:1.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/builder-commons)
|
||||
* MaxMind DB Reader (com.maxmind.db:maxmind-db:1.2.2 - http://dev.maxmind.com/)
|
||||
* MaxMind GeoIP2 API (com.maxmind.geoip2:geoip2:2.11.0 - http://dev.maxmind.com/geoip/geoip2/web-services)
|
||||
* Nimbus JOSE+JWT (com.nimbusds:nimbus-jose-jwt:7.9 - https://bitbucket.org/connect2id/nimbus-jose-jwt)
|
||||
* opencsv (com.opencsv:opencsv:5.6 - http://opencsv.sf.net)
|
||||
* java-libpst (com.pff:java-libpst:0.9.3 - https://github.com/rjohnsondev/java-libpst)
|
||||
* rome (com.rometools:rome:1.18.0 - http://rometools.com/rome)
|
||||
* rome-modules (com.rometools:rome-modules:1.18.0 - http://rometools.com/rome-modules)
|
||||
* rome-utils (com.rometools:rome-utils:1.18.0 - http://rometools.com/rome-utils)
|
||||
* fastinfoset (com.sun.xml.fastinfoset:FastInfoset:1.2.15 - http://fi.java.net)
|
||||
* T-Digest (com.tdunning:t-digest:3.1 - https://github.com/tdunning/t-digest)
|
||||
* JSON library from Android SDK (com.vaadin.external.google:android-json:0.0.20131108.vaadin1 - http://developer.android.com/sdk)
* SparseBitSet (com.zaxxer:SparseBitSet:1.2 - https://github.com/brettwooldridge/SparseBitSet)
* Apache Commons BeanUtils (commons-beanutils:commons-beanutils:1.9.4 - https://commons.apache.org/proper/commons-beanutils/)
* Apache Commons CLI (commons-cli:commons-cli:1.4 - http://commons.apache.org/proper/commons-cli/)
* Jtwig Core (com.lyncode:jtwig-core:2.0.1 - http://www.lyncode.com/jtwig-core)
* Jtwig Core Functions (com.lyncode:jtwig-functions:2.0.1 - http://www.lyncode.com/jtwig-functions)
* Jtwig Spring (com.lyncode:jtwig-spring:2.0.1 - http://www.lyncode.com/jtwig-spring)
* Test Support (com.lyncode:test-support:1.0.3 - http://nexus.sonatype.org/oss-repository-hosting.html/test-support)
* Spatial4J (com.spatial4j:spatial4j:0.4.1 - https://github.com/spatial4j/spatial4j)
* Apache Commons BeanUtils (commons-beanutils:commons-beanutils:1.9.2 - http://commons.apache.org/proper/commons-beanutils/)
* Apache Commons CLI (commons-cli:commons-cli:1.3.1 - http://commons.apache.org/proper/commons-cli/)
* Apache Commons Codec (commons-codec:commons-codec:1.10 - http://commons.apache.org/proper/commons-codec/)
* Apache Commons Collections (commons-collections:commons-collections:3.2.2 - http://commons.apache.org/collections/)
* Apache Commons Configuration (commons-configuration:commons-configuration:1.10 - http://commons.apache.org/configuration/)
* Commons Digester (commons-digester:commons-digester:1.8.1 - http://commons.apache.org/digester/)
* Apache Commons FileUpload (commons-fileupload:commons-fileupload:1.3.3 - http://commons.apache.org/proper/commons-fileupload/)
* Apache Commons IO (commons-io:commons-io:2.7 - https://commons.apache.org/proper/commons-io/)
* Apache Commons FileUpload (commons-fileupload:commons-fileupload:1.3.1 - http://commons.apache.org/proper/commons-fileupload/)
* HttpClient (commons-httpclient:commons-httpclient:3.1 - http://jakarta.apache.org/httpcomponents/httpclient-3.x/)
* Commons IO (commons-io:commons-io:2.4 - http://commons.apache.org/io/)
* commons-jexl (commons-jexl:commons-jexl:1.0 - no url defined)
* Commons JXPath (commons-jxpath:commons-jxpath:1.3 - http://commons.apache.org/jxpath/)
* Commons Lang (commons-lang:commons-lang:2.6 - http://commons.apache.org/lang/)
* Apache Commons Logging (commons-logging:commons-logging:1.2 - http://commons.apache.org/proper/commons-logging/)
* Apache Commons Validator (commons-validator:commons-validator:1.5.0 - http://commons.apache.org/proper/commons-validator/)
* GeoJson POJOs for Jackson (de.grundid.opendatalab:geojson-jackson:1.14 - https://github.com/opendatalab-de/geojson-jackson)
* Boilerpipe -- Boilerplate Removal and Fulltext Extraction from HTML pages (de.l3s.boilerpipe:boilerpipe:1.1.0 - http://code.google.com/p/boilerpipe/)
* OpenAIRE Funders Model (eu.openaire:funders-model:2.0.0 - https://api.openaire.eu)
* Metrics Core (io.dropwizard.metrics:metrics-core:4.1.5 - https://metrics.dropwizard.io/metrics-core)
* Graphite Integration for Metrics (io.dropwizard.metrics:metrics-graphite:4.1.5 - https://metrics.dropwizard.io/metrics-graphite)
* Metrics Integration for Jetty 9.3 and higher (io.dropwizard.metrics:metrics-jetty9:4.1.5 - https://metrics.dropwizard.io/metrics-jetty9)
* Metrics Integration with JMX (io.dropwizard.metrics:metrics-jmx:4.1.5 - https://metrics.dropwizard.io/metrics-jmx)
* JVM Integration for Metrics (io.dropwizard.metrics:metrics-jvm:4.1.5 - https://metrics.dropwizard.io/metrics-jvm)
* micrometer-core (io.micrometer:micrometer-core:1.8.6 - https://github.com/micrometer-metrics/micrometer)
* Netty/Buffer (io.netty:netty-buffer:4.1.68.Final - https://netty.io/netty-buffer/)
* Netty/Codec (io.netty:netty-codec:4.1.68.Final - https://netty.io/netty-codec/)
* Netty/Codec/HTTP (io.netty:netty-codec-http:4.1.53.Final - https://netty.io/netty-codec-http/)
* Netty/Codec/Socks (io.netty:netty-codec-socks:4.1.53.Final - https://netty.io/netty-codec-socks/)
* Netty/Common (io.netty:netty-common:4.1.68.Final - https://netty.io/netty-common/)
* Netty/Handler (io.netty:netty-handler:4.1.68.Final - https://netty.io/netty-handler/)
* Netty/Handler/Proxy (io.netty:netty-handler-proxy:4.1.53.Final - https://netty.io/netty-handler-proxy/)
* Netty/Resolver (io.netty:netty-resolver:4.1.68.Final - https://netty.io/netty-resolver/)
* Netty/Transport (io.netty:netty-transport:4.1.68.Final - https://netty.io/netty-transport/)
* Netty/Transport/Native/Epoll (io.netty:netty-transport-native-epoll:4.1.68.Final - https://netty.io/netty-transport-native-epoll/)
* Netty/Transport/Native/Unix/Common (io.netty:netty-transport-native-unix-common:4.1.68.Final - https://netty.io/netty-transport-native-unix-common/)
* OpenTracing API (io.opentracing:opentracing-api:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-api)
* OpenTracing-noop (io.opentracing:opentracing-noop:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-noop)
* OpenTracing-util (io.opentracing:opentracing-util:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-util)
* Google S2 geometry library (io.sgr:s2-geometry-library-java:1.0.0 - https://github.com/sgr-io/s2-geometry-library-java)
* swagger-annotations (io.swagger:swagger-annotations:1.6.2 - https://github.com/swagger-api/swagger-core/modules/swagger-annotations)
* swagger-compat-spec-parser (io.swagger:swagger-compat-spec-parser:1.0.52 - http://nexus.sonatype.org/oss-repository-hosting.html/swagger-parser-project/modules/swagger-compat-spec-parser)
* swagger-core (io.swagger:swagger-core:1.6.2 - https://github.com/swagger-api/swagger-core/modules/swagger-core)
* swagger-models (io.swagger:swagger-models:1.6.2 - https://github.com/swagger-api/swagger-core/modules/swagger-models)
* swagger-parser (io.swagger:swagger-parser:1.0.52 - http://nexus.sonatype.org/oss-repository-hosting.html/swagger-parser-project/modules/swagger-parser)
* swagger-annotations (io.swagger.core.v3:swagger-annotations:2.1.5 - https://github.com/swagger-api/swagger-core/modules/swagger-annotations)
* swagger-core (io.swagger.core.v3:swagger-core:2.1.5 - https://github.com/swagger-api/swagger-core/modules/swagger-core)
* swagger-models (io.swagger.core.v3:swagger-models:2.1.5 - https://github.com/swagger-api/swagger-core/modules/swagger-models)
* swagger-parser (io.swagger.parser.v3:swagger-parser:2.0.23 - http://nexus.sonatype.org/oss-repository-hosting.html/swagger-parser-project/modules/swagger-parser)
* swagger-parser (io.swagger.parser.v3:swagger-parser-core:2.0.23 - http://nexus.sonatype.org/oss-repository-hosting.html/swagger-parser-project/modules/swagger-parser-core)
* swagger-parser-v2-converter (io.swagger.parser.v3:swagger-parser-v2-converter:2.0.23 - http://nexus.sonatype.org/oss-repository-hosting.html/swagger-parser-project/modules/swagger-parser-v2-converter)
* swagger-parser-v3 (io.swagger.parser.v3:swagger-parser-v3:2.0.23 - http://nexus.sonatype.org/oss-repository-hosting.html/swagger-parser-project/modules/swagger-parser-v3)
* Jakarta Bean Validation API (jakarta.validation:jakarta.validation-api:2.0.2 - https://beanvalidation.org)
* JSR107 API and SPI (javax.cache:cache-api:1.1.0 - https://github.com/jsr107/jsr107spec)
* The Netty Project (io.netty:netty:3.7.0.Final - http://netty.io/)
* jakarta-regexp (jakarta-regexp:jakarta-regexp:1.4 - no url defined)
* javax.inject (javax.inject:javax.inject:1 - http://code.google.com/p/atinject/)
* Bean Validation API (javax.validation:validation-api:2.0.1.Final - http://beanvalidation.org)
* Bean Validation API (javax.validation:validation-api:1.1.0.Final - http://beanvalidation.org)
* jdbm (jdbm:jdbm:1.0 - no url defined)
* Joda time (joda-time:joda-time:2.2 - http://joda-time.sourceforge.net)
* Joda-Time (joda-time:joda-time:2.9.2 - http://www.joda.org/joda-time/)
* Byte Buddy (without dependencies) (net.bytebuddy:byte-buddy:1.11.13 - https://bytebuddy.net/byte-buddy)
* Byte Buddy agent (net.bytebuddy:byte-buddy-agent:1.11.13 - https://bytebuddy.net/byte-buddy-agent)
* eigenbase-properties (net.hydromatic:eigenbase-properties:1.1.5 - http://github.com/julianhyde/eigenbase-properties)
* json-unit-core (net.javacrumbs.json-unit:json-unit-core:2.19.0 - https://github.com/lukas-krecan/JsonUnit/json-unit-core)
* Apache Log4j (log4j:log4j:1.2.17 - http://logging.apache.org/log4j/1.2/)
* "Java Concurrency in Practice" book annotations (net.jcip:jcip-annotations:1.0 - http://jcip.net/)
* ASM based accessors helper used by json-smart (net.minidev:accessors-smart:1.2 - http://www.minidev.net/)
* ASM based accessors helper used by json-smart (net.minidev:accessors-smart:2.4.7 - https://urielch.github.io/)
* JSON Small and Fast Parser (net.minidev:json-smart:2.3 - http://www.minidev.net/)
* JSON Small and Fast Parser (net.minidev:json-smart:2.4.7 - https://urielch.github.io/)
* Ehcache Core (net.sf.ehcache:ehcache-core:2.4.3 - http://ehcache.org)
* opencsv (net.sf.opencsv:opencsv:2.3 - http://opencsv.sf.net)
* Abdera Client (org.apache.abdera:abdera-client:1.1.3 - http://abdera.apache.org/abdera-client)
* Abdera Core (org.apache.abdera:abdera-core:1.1.3 - http://abdera.apache.org/abdera-core)
* I18N Libraries (org.apache.abdera:abdera-i18n:1.1.3 - http://abdera.apache.org)
* Apache Ant Core (org.apache.ant:ant:1.10.11 - https://ant.apache.org/)
* Apache Ant Launcher (org.apache.ant:ant-launcher:1.10.11 - https://ant.apache.org/)
* Apache Commons BCEL (org.apache.bcel:bcel:6.4.0 - https://commons.apache.org/proper/commons-bcel)
* Calcite Core (org.apache.calcite:calcite-core:1.27.0 - https://calcite.apache.org)
* Calcite Linq4j (org.apache.calcite:calcite-linq4j:1.27.0 - https://calcite.apache.org)
* Apache Calcite Avatica (org.apache.calcite.avatica:avatica-core:1.18.0 - https://calcite.apache.org/avatica)
* Apache Commons Collections (org.apache.commons:commons-collections4:4.1 - http://commons.apache.org/proper/commons-collections/)
* Apache Commons Compress (org.apache.commons:commons-compress:1.21 - https://commons.apache.org/proper/commons-compress/)
* Apache Commons Configuration (org.apache.commons:commons-configuration2:2.8.0 - https://commons.apache.org/proper/commons-configuration/)
* Apache Commons CSV (org.apache.commons:commons-csv:1.9.0 - https://commons.apache.org/proper/commons-csv/)
* Apache Commons DBCP (org.apache.commons:commons-dbcp2:2.8.0 - https://commons.apache.org/dbcp/)
* Apache Commons Exec (org.apache.commons:commons-exec:1.3 - http://commons.apache.org/proper/commons-exec/)
* Apache Commons Lang (org.apache.commons:commons-lang3:3.12.0 - https://commons.apache.org/proper/commons-lang/)
* Apache Commons Math (org.apache.commons:commons-math3:3.6.1 - http://commons.apache.org/proper/commons-math/)
* Apache Commons Pool (org.apache.commons:commons-pool2:2.9.0 - https://commons.apache.org/proper/commons-pool/)
* Apache Commons Text (org.apache.commons:commons-text:1.9 - https://commons.apache.org/proper/commons-text)
* Curator Client (org.apache.curator:curator-client:2.13.0 - http://curator.apache.org/curator-client)
* Curator Framework (org.apache.curator:curator-framework:2.13.0 - http://curator.apache.org/curator-framework)
* Curator Recipes (org.apache.curator:curator-recipes:2.13.0 - http://curator.apache.org/curator-recipes)
* Apache Hadoop Annotations (org.apache.hadoop:hadoop-annotations:3.2.2 - no url defined)
* Apache Hadoop Auth (org.apache.hadoop:hadoop-auth:3.2.2 - no url defined)
* Apache Hadoop Common (org.apache.hadoop:hadoop-common:3.2.2 - no url defined)
* Apache Hadoop HDFS Client (org.apache.hadoop:hadoop-hdfs-client:3.2.2 - no url defined)
* htrace-core4 (org.apache.htrace:htrace-core4:4.1.0-incubating - http://incubator.apache.org/projects/htrace.html)
* Apache HttpClient (org.apache.httpcomponents:httpclient:4.5.13 - http://hc.apache.org/httpcomponents-client)
* Abdera Parser (org.apache.abdera:abdera-parser:1.1.3 - http://abdera.apache.org/abdera-parser)
* org.apache.tools.ant (org.apache.ant:ant:1.7.0 - http://ant.apache.org/ant/)
* ant-launcher (org.apache.ant:ant-launcher:1.7.0 - http://ant.apache.org/ant-launcher/)
* Avalon Framework API (org.apache.avalon.framework:avalon-framework-api:4.3.1 - http://www.apache.org/excalibur/avalon-framework/avalon-framework-api/)
* Avalon Framework Implementation (org.apache.avalon.framework:avalon-framework-impl:4.3.1 - http://www.apache.org/excalibur/avalon-framework/avalon-framework-impl/)
* Cocoon Configuration API (org.apache.cocoon:cocoon-configuration-api:1.0.2 - http://cocoon.apache.org/subprojects/configuration/1.0/configuration-api/1.0/)
* Cocoon Core (org.apache.cocoon:cocoon-core:2.2.0 - http://cocoon.apache.org/2.2/core-modules/core/2.2/)
* Cocoon Expression Language API (org.apache.cocoon:cocoon-expression-language-api:1.0.0 - http://cocoon.apache.org/2.2/core-modules/expression-language-api/1.0/)
* Cocoon Expression Language Implementation. (org.apache.cocoon:cocoon-expression-language-impl:1.0.0 - http://cocoon.apache.org/2.2/core-modules/expression-language-impl/1.0/)
* Cocoon Flowscript Block Implementation (org.apache.cocoon:cocoon-flowscript-impl:1.0.0 - http://cocoon.apache.org/2.2/blocks/flowscript/1.0/)
* Cocoon Linkrewriter Block Implementation (org.apache.cocoon:cocoon-linkrewriter-impl:1.0.0 - http://cocoon.apache.org/2.2/blocks/linkrewriter/1.0/)
* Cocoon Pipeline API (org.apache.cocoon:cocoon-pipeline-api:1.0.0 - http://cocoon.apache.org/2.2/core-modules/pipeline-api/1.0/)
* Cocoon Pipeline Components (org.apache.cocoon:cocoon-pipeline-components:1.0.0 - http://cocoon.apache.org/2.2/core-modules/pipeline-components/1.0/)
* Cocoon Pipeline Implementation (org.apache.cocoon:cocoon-pipeline-impl:1.0.0 - http://cocoon.apache.org/2.2/core-modules/pipeline-impl/1.0/)
* Cocoon Servlet Service Components (org.apache.cocoon:cocoon-servlet-service-components:1.0.0 - http://cocoon.apache.org/subprojects/servlet-service/1.0/servlet-service-components/1.0/)
* Cocoon Sitemap API (org.apache.cocoon:cocoon-sitemap-api:1.0.0 - http://cocoon.apache.org/2.2/core-modules/sitemap-api/1.0/)
* Cocoon Sitemap Components (org.apache.cocoon:cocoon-sitemap-components:1.0.0 - http://cocoon.apache.org/2.2/core-modules/sitemap-components/1.0/)
* Cocoon Sitemap Implementation (org.apache.cocoon:cocoon-sitemap-impl:1.0.0 - http://cocoon.apache.org/2.2/core-modules/sitemap-impl/1.0/)
* Cocoon Spring Configurator (org.apache.cocoon:cocoon-spring-configurator:1.0.2 - http://cocoon.apache.org/cocoon-spring-configurator)
* Cocoon Store Implementation (org.apache.cocoon:cocoon-store-impl:1.0.0 - http://cocoon.apache.org/2.2/core-modules/store-impl/1.0/)
* Cocoon Template Framework Block Implementation (org.apache.cocoon:cocoon-template-impl:1.1.0 - http://cocoon.apache.org/2.2/blocks/template/1.0/)
* Cocoon Thread API (org.apache.cocoon:cocoon-thread-api:1.0.0 - http://cocoon.apache.org/2.2/core-modules/thread-api/1.0/)
* Cocoon Thread Implementation (org.apache.cocoon:cocoon-thread-impl:1.0.0 - http://cocoon.apache.org/2.2/core-modules/thread-impl/1.0/)
* Cocoon Util (org.apache.cocoon:cocoon-util:1.0.0 - http://cocoon.apache.org/2.2/core-modules/util/1.0/)
* Cocoon XML API (org.apache.cocoon:cocoon-xml-api:1.0.0 - http://cocoon.apache.org/2.2/core-modules/xml-api/1.0/)
* Cocoon XML Implementation (org.apache.cocoon:cocoon-xml-impl:1.0.0 - http://cocoon.apache.org/2.2/core-modules/xml-impl/1.0/)
* Cocoon XML Resolver (org.apache.cocoon:cocoon-xml-resolver:1.0.0 - http://cocoon.apache.org/2.2/core-modules/xml-resolver/1.0/)
* Cocoon XML Utilities (org.apache.cocoon:cocoon-xml-util:1.0.0 - http://cocoon.apache.org/2.2/core-modules/xml-util/1.0/)
* Apache Commons Compress (org.apache.commons:commons-compress:1.7 - http://commons.apache.org/proper/commons-compress/)
* Apache Commons CSV (org.apache.commons:commons-csv:1.0 - http://commons.apache.org/proper/commons-csv/)
* Apache Commons DBCP (org.apache.commons:commons-dbcp2:2.1.1 - http://commons.apache.org/dbcp/)
* Apache Commons Lang (org.apache.commons:commons-lang3:3.3.2 - http://commons.apache.org/proper/commons-lang/)
* Apache Commons Pool (org.apache.commons:commons-pool2:2.4.2 - http://commons.apache.org/proper/commons-pool/)
* Excalibur Pool API (org.apache.excalibur.components:excalibur-pool-api:2.2.1 - http://www.apache.org/excalibur/excalibur-components-modules/excalibur-pool-modules/excalibur-pool-api/)
* Excalibur Sourceresolve (org.apache.excalibur.components:excalibur-sourceresolve:2.2.3 - http://www.apache.org/excalibur/excalibur-sourceresolve/)
* Excalibur Store (org.apache.excalibur.components:excalibur-store:2.2.1 - http://www.apache.org/excalibur/excalibur-components-modules/excalibur-store/)
* Excalibur XML Utilities (org.apache.excalibur.components:excalibur-xmlutil:2.2.1 - http://www.apache.org/excalibur/excalibur-components-modules/excalibur-xmlutil/)
* Excalibur Instrument API (org.apache.excalibur.containerkit:excalibur-instrument-api:2.2.1 - http://www.apache.org/excalibur/excalibur-containerkit/excalibur-instrument-modules/excalibur-instrument-api/)
* Excalibur Logger (org.apache.excalibur.containerkit:excalibur-logger:2.2.1 - http://www.apache.org/excalibur/excalibur-containerkit/excalibur-logger/)
* Activation 1.1 (org.apache.geronimo.specs:geronimo-activation_1.1_spec:1.1 - http://geronimo.apache.org/maven/specs/geronimo-activation_1.1_spec/1.1)
* JavaMail 1.4 (org.apache.geronimo.specs:geronimo-javamail_1.4_spec:1.7.1 - http://geronimo.apache.org/maven/specs/geronimo-javamail_1.4_spec/1.7.1)
* Streaming API for XML (STAX API 1.0) (org.apache.geronimo.specs:geronimo-stax-api_1.0_spec:1.0.1 - http://geronimo.apache.org/specs/geronimo-stax-api_1.0_spec)
* Apache Hadoop Annotations (org.apache.hadoop:hadoop-annotations:2.2.0 - no url defined)
* Apache Hadoop Auth (org.apache.hadoop:hadoop-auth:2.2.0 - no url defined)
* Apache Hadoop Common (org.apache.hadoop:hadoop-common:2.2.0 - no url defined)
* Apache Hadoop HDFS (org.apache.hadoop:hadoop-hdfs:2.2.0 - no url defined)
* Apache HttpClient (org.apache.httpcomponents:httpclient:4.5.1 - http://hc.apache.org/httpcomponents-client)
* Apache HttpClient Cache (org.apache.httpcomponents:httpclient-cache:4.2.6 - http://hc.apache.org/httpcomponents-client)
* Apache HttpCore (org.apache.httpcomponents:httpcore:4.4.15 - http://hc.apache.org/httpcomponents-core-ga)
* Apache HttpClient Mime (org.apache.httpcomponents:httpmime:4.5.13 - http://hc.apache.org/httpcomponents-client)
* Apache James :: Mime4j :: Core (org.apache.james:apache-mime4j-core:0.8.4 - http://james.apache.org/mime4j/apache-mime4j-core)
* Apache James :: Mime4j :: DOM (org.apache.james:apache-mime4j-dom:0.8.4 - http://james.apache.org/mime4j/apache-mime4j-dom)
* Apache HttpCore (org.apache.httpcomponents:httpcore:4.4.4 - http://hc.apache.org/httpcomponents-core-ga)
* Apache HttpClient Mime (org.apache.httpcomponents:httpmime:4.3.1 - http://hc.apache.org/httpcomponents-client)
* Apache JAMES Mime4j (Core) (org.apache.james:apache-mime4j-core:0.7.2 - http://james.apache.org/mime4j/apache-mime4j-core)
* Apache JAMES Mime4j (DOM) (org.apache.james:apache-mime4j-dom:0.7.2 - http://james.apache.org/mime4j/apache-mime4j-dom)
* Apache Jena - Libraries POM (org.apache.jena:apache-jena-libs:2.13.0 - http://jena.apache.org/apache-jena-libs/)
* Apache Jena - ARQ (SPARQL 1.1 Query Engine) (org.apache.jena:jena-arq:2.13.0 - http://jena.apache.org/jena-arq/)
* Apache Jena - Core (org.apache.jena:jena-core:2.13.0 - http://jena.apache.org/jena-core/)
* Apache Jena - IRI (org.apache.jena:jena-iri:1.1.2 - http://jena.apache.org/jena-iri/)
* Apache Jena - TDB (Native Triple Store) (org.apache.jena:jena-tdb:1.1.2 - http://jena.apache.org/jena-tdb/)
* Kerby-kerb core (org.apache.kerby:kerb-core:1.0.1 - http://directory.apache.org/kerby/kerby-kerb/kerb-core)
* Kerby-kerb Util (org.apache.kerby:kerb-util:1.0.1 - http://directory.apache.org/kerby/kerby-kerb/kerb-util)
* Kerby ASN1 Project (org.apache.kerby:kerby-asn1:1.0.1 - http://directory.apache.org/kerby/kerby-common/kerby-asn1)
* Kerby PKIX Project (org.apache.kerby:kerby-pkix:1.0.1 - http://directory.apache.org/kerby/kerby-pkix)
* Apache Log4j 1.x Compatibility API (org.apache.logging.log4j:log4j-1.2-api:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-1.2-api/)
* Apache Log4j API (org.apache.logging.log4j:log4j-api:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-api/)
* Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-core/)
* Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-jul/)
* Apache Log4j Layout for JSON template (org.apache.logging.log4j:log4j-layout-template-json:2.16.0 - https://logging.apache.org/log4j/2.x/log4j-layout-template-json/)
* Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/)
* Apache Log4j Web (org.apache.logging.log4j:log4j-web:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-web/)
* Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-common)
* Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-icu)
* Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji)
* Lucene Nori Korean Morphological Analyzer (org.apache.lucene:lucene-analyzers-nori:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-nori)
* Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic)
* Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn)
* Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-stempel)
* Lucene Memory (org.apache.lucene:lucene-backward-codecs:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-backward-codecs)
* Lucene Classification (org.apache.lucene:lucene-classification:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-classification)
* Lucene codecs (org.apache.lucene:lucene-codecs:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-codecs)
* Lucene Core (org.apache.lucene:lucene-core:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-core)
* Lucene Expressions (org.apache.lucene:lucene-expressions:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-expressions)
* Lucene Grouping (org.apache.lucene:lucene-grouping:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-grouping)
* Lucene Highlighter (org.apache.lucene:lucene-highlighter:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-highlighter)
* Lucene Join (org.apache.lucene:lucene-join:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-join)
* Lucene Memory (org.apache.lucene:lucene-memory:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-memory)
* Lucene Miscellaneous (org.apache.lucene:lucene-misc:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-misc)
* Lucene Queries (org.apache.lucene:lucene-queries:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-queries)
* Lucene QueryParsers (org.apache.lucene:lucene-queryparser:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-queryparser)
* Lucene Sandbox (org.apache.lucene:lucene-sandbox:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-sandbox)
* Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras)
* Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-spatial3d)
* Lucene Suggest (org.apache.lucene:lucene-suggest:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-suggest)
* Apache FontBox (org.apache.pdfbox:fontbox:2.0.24 - http://pdfbox.apache.org/)
* PDFBox JBIG2 ImageIO plugin (org.apache.pdfbox:jbig2-imageio:3.0.3 - https://www.apache.org/jbig2-imageio/)
* Apache JempBox (org.apache.pdfbox:jempbox:1.8.16 - http://www.apache.org/pdfbox-parent/jempbox/)
* Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.24 - https://www.apache.org/pdfbox-parent/pdfbox/)
* Apache PDFBox Debugger (org.apache.pdfbox:pdfbox-debugger:2.0.25 - https://www.apache.org/pdfbox-parent/pdfbox-debugger/)
* Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.25 - https://www.apache.org/pdfbox-parent/pdfbox-tools/)
* Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.25 - https://www.apache.org/pdfbox-parent/xmpbox/)
* Apache POI - Common (org.apache.poi:poi:5.2.0 - https://poi.apache.org/)
* Apache POI - API based on OPC and OOXML schemas (org.apache.poi:poi-ooxml:5.2.0 - https://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml-lite:5.2.0 - https://poi.apache.org/)
* Apache POI (org.apache.poi:poi-scratchpad:5.2.0 - https://poi.apache.org/)
* Apache Solr Core (org.apache.solr:solr-core:8.11.1 - https://lucene.apache.org/solr-parent/solr-core)
* Apache Solr Solrj (org.apache.solr:solr-solrj:8.11.1 - https://lucene.apache.org/solr-parent/solr-solrj)
* Apache Standard Taglib Implementation (org.apache.taglibs:taglibs-standard-impl:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-impl)
* Apache Standard Taglib Specification API (org.apache.taglibs:taglibs-standard-spec:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-spec)
* Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-analyzers-common)
* Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:4.10.4 - http://lucene.apache.org/lucene-parent/lucene-analyzers-common)
* Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:4.10.4 - http://lucene.apache.org/lucene-parent/lucene-analyzers-icu)
* Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:4.10.4 - http://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji)
* Lucene Morfologik Polish Lemmatizer (org.apache.lucene:lucene-analyzers-morfologik:4.10.4 - http://lucene.apache.org/lucene-parent/lucene-analyzers-morfologik)
* Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:4.10.4 - http://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic)
* Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:4.10.4 - http://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn)
* Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:4.10.4 - http://lucene.apache.org/lucene-parent/lucene-analyzers-stempel)
* Lucene codecs (org.apache.lucene:lucene-codecs:4.10.4 - http://lucene.apache.org/lucene-parent/lucene-codecs)
* Lucene Core (org.apache.lucene:lucene-core:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-core)
* Lucene Core (org.apache.lucene:lucene-core:4.10.4 - http://lucene.apache.org/lucene-parent/lucene-core)
* Lucene Expressions (org.apache.lucene:lucene-expressions:4.10.4 - http://lucene.apache.org/lucene-parent/lucene-expressions)
* Lucene Grouping (org.apache.lucene:lucene-grouping:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-grouping)
* Lucene Grouping (org.apache.lucene:lucene-grouping:4.10.4 - http://lucene.apache.org/lucene-parent/lucene-grouping)
* Lucene Highlighter (org.apache.lucene:lucene-highlighter:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-highlighter)
* Lucene Highlighter (org.apache.lucene:lucene-highlighter:4.10.4 - http://lucene.apache.org/lucene-parent/lucene-highlighter)
* Lucene Join (org.apache.lucene:lucene-join:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-join)
* Lucene Join (org.apache.lucene:lucene-join:4.10.4 - http://lucene.apache.org/lucene-parent/lucene-join)
* Lucene Memory (org.apache.lucene:lucene-memory:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-memory)
* Lucene Memory (org.apache.lucene:lucene-memory:4.10.4 - http://lucene.apache.org/lucene-parent/lucene-memory)
* Lucene Miscellaneous (org.apache.lucene:lucene-misc:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-misc)
* Lucene Miscellaneous (org.apache.lucene:lucene-misc:4.10.4 - http://lucene.apache.org/lucene-parent/lucene-misc)
* Lucene Queries (org.apache.lucene:lucene-queries:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-queries)
* Lucene Queries (org.apache.lucene:lucene-queries:4.10.4 - http://lucene.apache.org/lucene-parent/lucene-queries)
* Lucene QueryParsers (org.apache.lucene:lucene-queryparser:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-queryparser)
* Lucene QueryParsers (org.apache.lucene:lucene-queryparser:4.10.4 - http://lucene.apache.org/lucene-parent/lucene-queryparser)
* Lucene Sandbox (org.apache.lucene:lucene-sandbox:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-sandbox)
* Lucene Spatial (org.apache.lucene:lucene-spatial:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-spatial)
* Lucene Spatial (org.apache.lucene:lucene-spatial:4.10.4 - http://lucene.apache.org/lucene-parent/lucene-spatial)
* Lucene Suggest (org.apache.lucene:lucene-suggest:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-suggest)
* Lucene Suggest (org.apache.lucene:lucene-suggest:4.10.4 - http://lucene.apache.org/lucene-parent/lucene-suggest)
* Apache FontBox (org.apache.pdfbox:fontbox:2.0.2 - http://pdfbox.apache.org/)
* Apache JempBox (org.apache.pdfbox:jempbox:1.8.4 - http://www.apache.org/pdfbox-parent/jempbox/)
* Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.2 - http://www.apache.org/pdfbox-parent/pdfbox/)
* Apache POI (org.apache.poi:poi:3.13 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml:3.13 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml-schemas:3.10.1 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml-schemas:3.13 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi-scratchpad:3.13 - http://poi.apache.org/)
* Apache Solr Search Server (org.apache.solr:solr:4.10.4 - http://lucene.apache.org/solr-parent/solr)
* Apache Solr Analysis Extras (org.apache.solr:solr-analysis-extras:4.10.4 - http://lucene.apache.org/solr-parent/solr-analysis-extras)
* Apache Solr Content Extraction Library (org.apache.solr:solr-cell:4.10.4 - http://lucene.apache.org/solr-parent/solr-cell)
* Apache Solr Core (org.apache.solr:solr-core:4.10.4 - http://lucene.apache.org/solr-parent/solr-core)
* Apache Solr Solrj (org.apache.solr:solr-solrj:4.10.4 - http://lucene.apache.org/solr-parent/solr-solrj)
* Apache Thrift (org.apache.thrift:libthrift:0.9.2 - http://thrift.apache.org)
* Apache Tika core (org.apache.tika:tika-core:2.3.0 - https://tika.apache.org/)
* Apache Tika Apple parser module (org.apache.tika:tika-parser-apple-module:2.3.0 - https://tika.apache.org/tika-parser-apple-module/)
* Apache Tika audiovideo parser module (org.apache.tika:tika-parser-audiovideo-module:2.3.0 - https://tika.apache.org/tika-parser-audiovideo-module/)
* Apache Tika cad parser module (org.apache.tika:tika-parser-cad-module:2.3.0 - https://tika.apache.org/tika-parser-cad-module/)
* Apache Tika code parser module (org.apache.tika:tika-parser-code-module:2.3.0 - https://tika.apache.org/tika-parser-code-module/)
* Apache Tika crypto parser module (org.apache.tika:tika-parser-crypto-module:2.3.0 - https://tika.apache.org/tika-parser-crypto-module/)
* Apache Tika digest commons (org.apache.tika:tika-parser-digest-commons:2.3.0 - https://tika.apache.org/tika-parser-digest-commons/)
* Apache Tika font parser module (org.apache.tika:tika-parser-font-module:2.3.0 - https://tika.apache.org/tika-parser-font-module/)
* Apache Tika html commons (org.apache.tika:tika-parser-html-commons:2.3.0 - https://tika.apache.org/tika-parser-html-commons/)
* Apache Tika html parser module (org.apache.tika:tika-parser-html-module:2.3.0 - https://tika.apache.org/tika-parser-html-module/)
* Apache Tika image parser module (org.apache.tika:tika-parser-image-module:2.3.0 - https://tika.apache.org/tika-parser-image-module/)
* Apache Tika mail commons (org.apache.tika:tika-parser-mail-commons:2.3.0 - https://tika.apache.org/tika-parser-mail-commons/)
* Apache Tika mail parser module (org.apache.tika:tika-parser-mail-module:2.3.0 - https://tika.apache.org/tika-parser-mail-module/)
* Apache Tika Microsoft parser module (org.apache.tika:tika-parser-microsoft-module:2.3.0 - https://tika.apache.org/tika-parser-microsoft-module/)
* Apache Tika miscellaneous office format parser module (org.apache.tika:tika-parser-miscoffice-module:2.3.0 - https://tika.apache.org/tika-parser-miscoffice-module/)
* Apache Tika news parser module (org.apache.tika:tika-parser-news-module:2.3.0 - https://tika.apache.org/tika-parser-news-module/)
* Apache Tika OCR parser module (org.apache.tika:tika-parser-ocr-module:2.3.0 - https://tika.apache.org/tika-parser-ocr-module/)
* Apache Tika PDF parser module (org.apache.tika:tika-parser-pdf-module:2.3.0 - https://tika.apache.org/tika-parser-pdf-module/)
* Apache Tika package parser module (org.apache.tika:tika-parser-pkg-module:2.3.0 - https://tika.apache.org/tika-parser-pkg-module/)
* Apache Tika text parser module (org.apache.tika:tika-parser-text-module:2.3.0 - https://tika.apache.org/tika-parser-text-module/)
* Apache Tika XML parser module (org.apache.tika:tika-parser-xml-module:2.3.0 - https://tika.apache.org/tika-parser-xml-module/)
* Apache Tika XMP commons (org.apache.tika:tika-parser-xmp-commons:2.3.0 - https://tika.apache.org/tika-parser-xmp-commons/)
* Apache Tika ZIP commons (org.apache.tika:tika-parser-zip-commons:2.3.0 - https://tika.apache.org/tika-parser-zip-commons/)
* Apache Tika standard parser package (org.apache.tika:tika-parsers-standard-package:2.3.0 - https://tika.apache.org/tika-parsers/tika-parsers-standard/tika-parsers-standard-package/)
* tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.63 - https://tomcat.apache.org/)
* tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.63 - https://tomcat.apache.org/)
* tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.63 - https://tomcat.apache.org/)
* Apache Velocity - Engine (org.apache.velocity:velocity-engine-core:2.3 - http://velocity.apache.org/engine/devel/velocity-engine-core/)
* Apache Velocity - JSR 223 Scripting (org.apache.velocity:velocity-engine-scripting:2.2 - http://velocity.apache.org/engine/devel/velocity-engine-scripting/)
* Axiom API (org.apache.ws.commons.axiom:axiom-api:1.2.22 - http://ws.apache.org/axiom/)
* Abdera Model (FOM) Implementation (org.apache.ws.commons.axiom:fom-impl:1.2.22 - http://ws.apache.org/axiom/implementations/fom-impl/)
* XmlBeans (org.apache.xmlbeans:xmlbeans:5.0.3 - https://xmlbeans.apache.org/)
* Apache ZooKeeper - Server (org.apache.zookeeper:zookeeper:3.6.2 - http://zookeeper.apache.org/zookeeper)
* Apache ZooKeeper - Jute (org.apache.zookeeper:zookeeper-jute:3.6.2 - http://zookeeper.apache.org/zookeeper-jute)
* org.apiguardian:apiguardian-api (org.apiguardian:apiguardian-api:1.1.0 - https://github.com/apiguardian-team/apiguardian)
* AssertJ fluent assertions (org.assertj:assertj-core:3.21.0 - https://assertj.github.io/doc/assertj-core/)
* Evo Inflector (org.atteo:evo-inflector:1.3 - http://atteo.org/static/evo-inflector)
* jose4j (org.bitbucket.b_c:jose4j:0.6.5 - https://bitbucket.org/b_c/jose4j/)
* Apache Tika core (org.apache.tika:tika-core:1.5 - http://tika.apache.org/)
* Apache Tika parsers (org.apache.tika:tika-parsers:1.5 - http://tika.apache.org/)
* Apache Tika XMP (org.apache.tika:tika-xmp:1.5 - http://tika.apache.org/)
* Axiom API (org.apache.ws.commons.axiom:axiom-api:1.2.14 - http://ws.apache.org/axiom/)
* Axiom Impl (org.apache.ws.commons.axiom:axiom-impl:1.2.14 - http://ws.apache.org/axiom/)
* XmlBeans (org.apache.xmlbeans:xmlbeans:2.6.0 - http://xmlbeans.apache.org)
* zookeeper (org.apache.zookeeper:zookeeper:3.4.6 - no url defined)
* Evo Inflector (org.atteo:evo-inflector:1.2.1 - http://atteo.org/static/evo-inflector)
* TagSoup (org.ccil.cowan.tagsoup:tagsoup:1.2.1 - http://home.ccil.org/~cowan/XML/tagsoup/)
* jems (org.dmfs:jems:1.18 - https://github.com/dmfs/jems)
* rfc3986-uri (org.dmfs:rfc3986-uri:0.8.1 - https://github.com/dmfs/uri-toolkit)
* Jetty :: Apache JSP Implementation (org.eclipse.jetty:apache-jsp:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/)
* Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client)
* Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client)
* Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server)
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
* Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-client)
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-continuation)
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-continuation)
* Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-deploy)
* Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-http)
* Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-io)
* Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-jmx)
* Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-rewrite)
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-security)
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-security)
* Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-server)
* Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlet)
* Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlets)
* Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util)
* Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util-ajax)
* Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-webapp)
* Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-xml)
* Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-client)
* Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-common)
* Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-hpack)
* Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport)
* Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-server)
* Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas)
* Ehcache (org.ehcache:ehcache:3.4.0 - http://ehcache.org)
* flyway-core (org.flywaydb:flyway-core:8.4.4 - https://flywaydb.org/flyway-core)
* Ogg and Vorbis for Java, Core (org.gagravarr:vorbis-java-core:0.8 - https://github.com/Gagravarr/VorbisJava)
* Apache Tika plugin for Ogg, Vorbis and FLAC (org.gagravarr:vorbis-java-tika:0.8 - https://github.com/Gagravarr/VorbisJava)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.0.23.Final - http://hibernate.org/validator/hibernate-validator)
* Hibernate Validator Portable Extension (org.hibernate.validator:hibernate-validator-cdi:6.0.23.Final - http://hibernate.org/validator/hibernate-validator-cdi)
* Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/)
* Java Annotation Indexer (org.jboss:jandex:2.4.2.Final - http://www.jboss.org/jandex)
* JBoss Logging 3 (org.jboss.logging:jboss-logging:3.4.3.Final - http://www.jboss.org)
* JDOM (org.jdom:jdom2:2.0.6.1 - http://www.jdom.org)
* jtwig-core (org.jtwig:jtwig-core:5.87.0.RELEASE - http://jtwig.org)
* jtwig-reflection (org.jtwig:jtwig-reflection:5.87.0.RELEASE - http://jtwig.org)
* jtwig-spring (org.jtwig:jtwig-spring:5.87.0.RELEASE - http://jtwig.org)
* jtwig-spring-boot-starter (org.jtwig:jtwig-spring-boot-starter:5.87.0.RELEASE - http://jtwig.org)
* jtwig-web (org.jtwig:jtwig-web:5.87.0.RELEASE - http://jtwig.org)
* Spatial4J (org.locationtech.spatial4j:spatial4j:0.7 - https://projects.eclipse.org/projects/locationtech.spatial4j)
* MockServer Java Client (org.mock-server:mockserver-client-java:5.11.2 - http://www.mock-server.com)
* MockServer Core (org.mock-server:mockserver-core:5.11.2 - http://www.mock-server.com)
* MockServer JUnit 4 Integration (org.mock-server:mockserver-junit-rule:5.11.2 - http://www.mock-server.com)
* MockServer & Proxy Netty (org.mock-server:mockserver-netty:5.11.2 - http://www.mock-server.com)
* MortBay :: Apache EL :: API and Implementation (org.mortbay.jasper:apache-el:8.5.35.1 - https://github.com/jetty-project/jasper-jsp/apache-el)
* MortBay :: Apache Jasper :: JSP Implementation (org.mortbay.jasper:apache-jsp:8.5.35.1 - https://github.com/jetty-project/jasper-jsp/apache-jsp)
* Jackson (org.codehaus.jackson:jackson-core-asl:1.9.13 - http://jackson.codehaus.org)
* Data Mapper for Jackson (org.codehaus.jackson:jackson-mapper-asl:1.9.13 - http://jackson.codehaus.org)
* Woodstox (org.codehaus.woodstox:woodstox-core-asl:4.1.4 - http://woodstox.codehaus.org)
* Woodstox (org.codehaus.woodstox:wstx-asl:3.2.0 - http://woodstox.codehaus.org)
* Woodstox (org.codehaus.woodstox:wstx-asl:3.2.7 - http://woodstox.codehaus.org)
* flyway-core (org.flywaydb:flyway-core:4.0.3 - https://flywaydb.org/flyway-core)
* Ogg and Vorbis for Java, Core (org.gagravarr:vorbis-java-core:0.1 - https://github.com/Gagravarr/VorbisJava)
* Apache Tika plugin for Ogg, Vorbis and FLAC (org.gagravarr:vorbis-java-tika:0.1 - https://github.com/Gagravarr/VorbisJava)
* Javassist (org.javassist:javassist:3.18.1-GA - http://www.javassist.org/)
* Jetty Server (org.mortbay.jetty:jetty:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/modules/jetty)
* Jetty Servlet Tester (org.mortbay.jetty:jetty-servlet-tester:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-servlet-tester)
* Jetty Utilities (org.mortbay.jetty:jetty-util:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-util)
* Servlet Specification API (org.mortbay.jetty:servlet-api:2.5-20081211 - http://jetty.mortbay.org/servlet-api)
* Objenesis (org.objenesis:objenesis:3.2 - http://objenesis.org/objenesis)
* parboiled-core (org.parboiled:parboiled-core:1.3.1 - http://parboiled.org)
* parboiled-java (org.parboiled:parboiled-java:1.3.1 - http://parboiled.org)
* RRD4J (org.rrd4j:rrd4j:3.5 - https://github.com/rrd4j/rrd4j/)
* JSONassert (org.skyscreamer:jsonassert:1.5.0 - https://github.com/skyscreamer/JSONassert)
* Spring AOP (org.springframework:spring-aop:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Beans (org.springframework:spring-beans:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Context (org.springframework:spring-context:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Context Support (org.springframework:spring-context-support:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Core (org.springframework:spring-core:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Expression Language (SpEL) (org.springframework:spring-expression:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Commons Logging Bridge (org.springframework:spring-jcl:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring JDBC (org.springframework:spring-jdbc:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Object/Relational Mapping (org.springframework:spring-orm:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring TestContext Framework (org.springframework:spring-test:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Transaction (org.springframework:spring-tx:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Web (org.springframework:spring-web:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Web MVC (org.springframework:spring-webmvc:5.3.20 - https://github.com/spring-projects/spring-framework)
* spring-boot (org.springframework.boot:spring-boot:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-actuator (org.springframework.boot:spring-boot-actuator:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-actuator-autoconfigure (org.springframework.boot:spring-boot-actuator-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-autoconfigure (org.springframework.boot:spring-boot-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot)
* Spring Boot Configuration Processor (org.springframework.boot:spring-boot-configuration-processor:2.0.0.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-tools/spring-boot-configuration-processor)
* spring-boot-starter (org.springframework.boot:spring-boot-starter:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-starter-actuator (org.springframework.boot:spring-boot-starter-actuator:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-starter-aop (org.springframework.boot:spring-boot-starter-aop:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-starter-cache (org.springframework.boot:spring-boot-starter-cache:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-starter-data-rest (org.springframework.boot:spring-boot-starter-data-rest:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-starter-json (org.springframework.boot:spring-boot-starter-json:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-starter-log4j2 (org.springframework.boot:spring-boot-starter-log4j2:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-starter-security (org.springframework.boot:spring-boot-starter-security:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-starter-test (org.springframework.boot:spring-boot-starter-test:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-starter-tomcat (org.springframework.boot:spring-boot-starter-tomcat:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-starter-web (org.springframework.boot:spring-boot-starter-web:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-test (org.springframework.boot:spring-boot-test:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-test-autoconfigure (org.springframework.boot:spring-boot-test-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot)
* Spring Data Core (org.springframework.data:spring-data-commons:2.6.4 - https://www.spring.io/spring-data/spring-data-commons)
* Spring Data REST - Core (org.springframework.data:spring-data-rest-core:3.6.4 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-core)
* Spring Data REST - WebMVC (org.springframework.data:spring-data-rest-webmvc:3.6.4 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-webmvc)
* Spring HATEOAS (org.springframework.hateoas:spring-hateoas:1.4.2 - https://github.com/spring-projects/spring-hateoas)
* Spring Plugin - Core (org.springframework.plugin:spring-plugin-core:2.0.0.RELEASE - https://github.com/spring-projects/spring-plugin/spring-plugin-core)
* spring-security-config (org.springframework.security:spring-security-config:5.6.5 - https://spring.io/projects/spring-security)
* spring-security-core (org.springframework.security:spring-security-core:5.6.5 - https://spring.io/projects/spring-security)
* spring-security-crypto (org.springframework.security:spring-security-crypto:5.6.5 - https://spring.io/projects/spring-security)
* spring-security-test (org.springframework.security:spring-security-test:5.6.5 - https://spring.io/projects/spring-security)
* spring-security-web (org.springframework.security:spring-security-web:5.6.5 - https://spring.io/projects/spring-security)
* Noggit (org.noggit:noggit:0.5 - http://noggit.org)
* Objenesis (org.objenesis:objenesis:2.1 - http://objenesis.org)
* parboiled-core (org.parboiled:parboiled-core:1.1.6 - http://parboiled.org)
* parboiled-java (org.parboiled:parboiled-java:1.1.6 - http://parboiled.org)
* org.restlet (org.restlet.jee:org.restlet:2.1.1 - no url defined)
* org.restlet.ext.servlet (org.restlet.jee:org.restlet.ext.servlet:2.1.1 - no url defined)
* rome-modules (org.rometools:rome-modules:1.0 - http://www.rometools.org)
* Spring AOP (org.springframework:spring-aop:3.2.16.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring AOP (org.springframework:spring-aop:3.2.5.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring Beans (org.springframework:spring-beans:3.2.16.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring Beans (org.springframework:spring-beans:3.2.5.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring Context (org.springframework:spring-context:3.2.16.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring Context (org.springframework:spring-context:3.2.5.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring Context Support (org.springframework:spring-context-support:3.2.16.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring Core (org.springframework:spring-core:3.2.16.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring Core (org.springframework:spring-core:3.2.5.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring Expression Language (SpEL) (org.springframework:spring-expression:3.2.16.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring Expression Language (SpEL) (org.springframework:spring-expression:3.2.5.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring JDBC (org.springframework:spring-jdbc:3.2.16.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring JDBC (org.springframework:spring-jdbc:3.2.5.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring Object/Relational Mapping (org.springframework:spring-orm:3.2.16.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring Object/Relational Mapping (org.springframework:spring-orm:3.2.5.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring TestContext Framework (org.springframework:spring-test:3.2.5.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring Transaction (org.springframework:spring-tx:3.2.16.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring Transaction (org.springframework:spring-tx:3.2.5.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring Web (org.springframework:spring-web:3.2.16.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring Web (org.springframework:spring-web:3.2.5.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring Web MVC (org.springframework:spring-webmvc:3.2.16.RELEASE - https://github.com/SpringSource/spring-framework)
* Spring Web MVC (org.springframework:spring-webmvc:3.2.5.RELEASE - https://github.com/SpringSource/spring-framework)
* spring-security-config (org.springframework.security:spring-security-config:3.2.9.RELEASE - http://spring.io/spring-security)
* spring-security-core (org.springframework.security:spring-security-core:3.2.9.RELEASE - http://spring.io/spring-security)
* spring-security-web (org.springframework.security:spring-security-web:3.2.9.RELEASE - http://spring.io/spring-security)
* SWORD Java API, GUI and CLI (org.swordapp:sword-common:1.1 - http://nexus.sonatype.org/oss-repository-hosting.html/sword-common)
* SWORD v2 :: Common Server Library (org.swordapp:sword2-server:1.0 - http://www.swordapp.org/)
* snappy-java (org.xerial.snappy:snappy-java:1.1.7.6 - https://github.com/xerial/snappy-java)
* xml-matchers (org.xmlmatchers:xml-matchers:0.10 - http://code.google.com/p/xml-matchers/)
* org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.0 - https://www.xmlunit.org/)
* org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.4 - https://www.xmlunit.org/)
* org.xmlunit:xmlunit-placeholders (org.xmlunit:xmlunit-placeholders:2.8.0 - https://www.xmlunit.org/xmlunit-placeholders/)
* SnakeYAML (org.yaml:snakeyaml:1.29 - http://www.snakeyaml.org)
* software.amazon.ion:ion-java (software.amazon.ion:ion-java:1.0.2 - https://github.com/amznlabs/ion-java/)
* oro (oro:oro:2.0.8 - no url defined)
* JUnitParams (pl.pragmatists:JUnitParams:1.0.2 - http://junitparams.googlecode.com)
* Rome A9 OpenSearch (rome:opensearch:0.1 - http://wiki.java.net/bin/view/Javawsxml/OpenSearch)
* ROME, RSS and atOM utilitiEs for Java (rome:rome:1.0 - https://rome.dev.java.net/)
* oai4j (se.kb:oai4j:0.6b1 - http://oai4j-client.sourceforge.net/)
* StAX API (stax:stax-api:1.0.1 - http://stax.codehaus.org/)
* standard (taglibs:standard:1.1.2 - no url defined)
* Xalan Java Serializer (xalan:serializer:2.7.2 - http://xml.apache.org/xalan-j/)
* xalan (xalan:xalan:2.7.0 - no url defined)
* Xalan Java (xalan:xalan:2.7.2 - http://xml.apache.org/xalan-j/)
* Xerces2-j (xerces:xercesImpl:2.12.2 - https://xerces.apache.org/xerces2-j/)
* Xerces2-j (xerces:xercesImpl:2.11.0 - https://xerces.apache.org/xerces2-j/)
* xmlParserAPIs (xerces:xmlParserAPIs:2.6.2 - no url defined)
* XML Commons External Components XML APIs (xml-apis:xml-apis:1.4.01 - http://xml.apache.org/commons/components/external/)
* XML Commons Resolver Component (xml-resolver:xml-resolver:1.2 - http://xml.apache.org/commons/components/resolver/)
BSD License:
* AntLR Parser Generator (antlr:antlr:2.7.7 - http://www.antlr.org/)
* Adobe XMPCore (com.adobe.xmp:xmpcore:6.1.11 - https://www.adobe.com/devnet/xmp/library/eula-xmp-library-java.html)
* ASM Core (asm:asm:3.3.1 - http://asm.objectweb.org/asm/)
* XMP Library for Java (com.adobe.xmp:xmpcore:5.1.2 - http://www.adobe.com/devnet/xmp.html)
* coverity-escapers (com.coverity.security:coverity-escapers:1.1.1 - http://coverity.com/security)
* Java Advanced Imaging Image I/O Tools API core (standalone) (com.github.jai-imageio:jai-imageio-core:1.4.0 - https://github.com/jai-imageio/jai-imageio-core)
* JSONLD Java :: Core (com.github.jsonld-java:jsonld-java:0.5.1 - http://github.com/jsonld-java/jsonld-java/jsonld-java/)
* curvesapi (com.github.virtuald:curvesapi:1.06 - https://github.com/virtuald/curvesapi)
* Protocol Buffers [Core] (com.google.protobuf:protobuf-java:3.11.0 - https://developers.google.com/protocol-buffers/protobuf-java/)
* JZlib (com.jcraft:jzlib:1.1.3 - http://www.jcraft.com/jzlib/)
* Protocol Buffer Java API (com.google.protobuf:protobuf-java:2.5.0 - http://code.google.com/p/protobuf)
* Jena IRI (com.hp.hpl.jena:iri:0.8 - http://jena.sf.net/iri)
* Jena (com.hp.hpl.jena:jena:2.6.4 - http://www.openjena.org/)
* yui compressor (com.yahoo.platform.yui:yuicompressor:2.3.6 - http://developer.yahoo.com/yui/compressor/)
* dnsjava (dnsjava:dnsjava:2.1.7 - http://www.dnsjava.org)
* dom4j (dom4j:dom4j:1.6.1 - http://dom4j.org)
* Biblio Transformation Engine :: Core (gr.ekt.bte:bte-core:0.9.3.5 - http://github.com/EKT/Biblio-Transformation-Engine/bte-core)
* Biblio Transformation Engine :: Input/Output (gr.ekt.bte:bte-io:0.9.3.5 - http://github.com/EKT/Biblio-Transformation-Engine/bte-io)
* jaxen (jaxen:jaxen:1.1.6 - http://jaxen.codehaus.org/)
* ANTLR 4 Runtime (org.antlr:antlr4-runtime:4.5.1-1 - http://www.antlr.org/antlr4-runtime)
* commons-compiler (org.codehaus.janino:commons-compiler:3.0.9 - http://janino-compiler.github.io/commons-compiler/)
* janino (org.codehaus.janino:janino:3.0.9 - http://janino-compiler.github.io/janino/)
* Stax2 API (org.codehaus.woodstox:stax2-api:4.2.1 - http://github.com/FasterXML/stax2-api)
* Hamcrest Date (org.exparity:hamcrest-date:2.0.7 - https://github.com/exparity/hamcrest-date)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Hamcrest (org.hamcrest:hamcrest:2.2 - http://hamcrest.org/JavaHamcrest/)
* JLine (jline:jline:0.9.94 - http://jline.sourceforge.net)
* ANTLR 3 Runtime (org.antlr:antlr-runtime:3.5 - http://www.antlr.org)
* Morfologik FSA (org.carrot2:morfologik-fsa:1.7.1 - http://morfologik.blogspot.com/morfologik-fsa/)
* Morfologik Stemming Dictionary for Polish (org.carrot2:morfologik-polish:1.7.1 - http://morfologik.blogspot.com/morfologik-polish/)
* Morfologik Stemming APIs (org.carrot2:morfologik-stemming:1.7.1 - http://morfologik.blogspot.com/morfologik-stemming/)
* Stax2 API (org.codehaus.woodstox:stax2-api:3.1.1 - http://woodstox.codehaus.org/StAX2)
* DSpace Kernel :: API and Implementation (org.dspace:dspace-api:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
* DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:6.0.3 - https://github.com/dspace/dspace-api-lang)
* DSpace JSP-UI (org.dspace:dspace-jspui:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
* DSpace OAI-PMH (org.dspace:dspace-oai:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
* DSpace RDF (org.dspace:dspace-rdf:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
* DSpace REST :: API and Implementation (org.dspace:dspace-rest:6.0-rc4-SNAPSHOT - http://demo.dspace.org)
* DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
* Apache Solr Webapp (org.dspace:dspace-solr:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
* DSpace SWORD (org.dspace:dspace-sword:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
* DSpace SWORD v2 (org.dspace:dspace-swordv2:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
* DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
* DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:6.0.3 - https://github.com/dspace/dspace-xmlui-lang)
* handle (org.dspace:handle:6.2 - no url defined)
|
||||
* jargon (org.dspace:jargon:1.4.25 - no url defined)
|
||||
* mets (org.dspace:mets:1.5.2 - no url defined)
|
||||
* oclc-harvester2 (org.dspace:oclc-harvester2:0.1.12 - no url defined)
|
||||
* XOAI : OAI-PMH Java Toolkit (org.dspace:xoai:3.2.10 - http://nexus.sonatype.org/oss-repository-hosting.html/xoai)
|
||||
* Repackaged Cocoon Servlet Service Implementation (org.dspace.dependencies.cocoon:dspace-cocoon-servlet-service-impl:1.0.3 - http://projects.dspace.org/dspace-pom/dspace-cocoon-servlet-service-impl)
|
||||
* DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
|
||||
* Hamcrest All (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all)
|
||||
* Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core)
|
||||
* HdrHistogram (org.hdrhistogram:HdrHistogram:2.1.12 - http://hdrhistogram.github.io/HdrHistogram/)
|
||||
* JBibTeX (org.jbibtex:jbibtex:1.0.20 - http://www.jbibtex.org)
|
||||
* asm (org.ow2.asm:asm:8.0.1 - http://asm.ow2.io/)
|
||||
* asm-analysis (org.ow2.asm:asm-analysis:7.1 - http://asm.ow2.org/)
|
||||
* asm-commons (org.ow2.asm:asm-commons:8.0.1 - http://asm.ow2.io/)
|
||||
* asm-tree (org.ow2.asm:asm-tree:7.1 - http://asm.ow2.org/)
|
||||
* asm-util (org.ow2.asm:asm-util:7.1 - http://asm.ow2.org/)
|
||||
* PostgreSQL JDBC Driver (org.postgresql:postgresql:42.4.1 - https://jdbc.postgresql.org)
|
||||
* Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections)
|
||||
* JMatIO (org.tallison:jmatio:1.5 - https://github.com/tballison/jmatio)
|
||||
* Hamcrest Core (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all)
|
||||
* JBibTeX (org.jbibtex:jbibtex:1.0.10 - http://www.jbibtex.org)
|
||||
* ASM Core (org.ow2.asm:asm:4.1 - http://asm.objectweb.org/asm/)
|
||||
* ASM Analysis (org.ow2.asm:asm-analysis:4.1 - http://asm.objectweb.org/asm-analysis/)
|
||||
* ASM Commons (org.ow2.asm:asm-commons:4.1 - http://asm.objectweb.org/asm-commons/)
|
||||
* ASM Tree (org.ow2.asm:asm-tree:4.1 - http://asm.objectweb.org/asm-tree/)
|
||||
* ASM Util (org.ow2.asm:asm-util:4.1 - http://asm.objectweb.org/asm-util/)
|
||||
* XMLUnit for Java (xmlunit:xmlunit:1.1 - http://xmlunit.sourceforge.net/)
|
||||
* XMLUnit for Java (xmlunit:xmlunit:1.3 - http://xmlunit.sourceforge.net/)
|
||||
|
||||
Common Development and Distribution License (CDDL):

* istack common utility code runtime (com.sun.istack:istack-commons-runtime:3.0.7 - http://java.net/istack-commons/istack-commons-runtime/)
* JavaMail API (com.sun.mail:javax.mail:1.6.2 - http://javaee.github.io/javamail/javax.mail)
* JavaMail API (no providers) (com.sun.mail:mailapi:1.6.2 - http://javaee.github.io/javamail/mailapi)
* Old JAXB Core (com.sun.xml.bind:jaxb-core:2.3.0.1 - http://jaxb.java.net/jaxb-bundles/jaxb-core)
* Old JAXB Runtime (com.sun.xml.bind:jaxb-impl:2.3.1 - http://jaxb.java.net/jaxb-bundles/jaxb-impl)
* Jakarta Annotations API (jakarta.annotation:jakarta.annotation-api:1.3.5 - https://projects.eclipse.org/projects/ee4j.ca)
* jakarta.ws.rs-api (jakarta.ws.rs:jakarta.ws.rs-api:2.1.6 - https://github.com/eclipse-ee4j/jaxrs-api)
* JavaBeans Activation Framework (JAF) (javax.activation:activation:1.1 - http://java.sun.com/products/javabeans/jaf/index.jsp)
* JavaBeans Activation Framework API jar (javax.activation:javax.activation-api:1.2.0 - http://java.net/all/javax.activation-api/)
* javax.annotation API (javax.annotation:javax.annotation-api:1.3.2 - http://jcp.org/en/jsr/detail?id=250)
* JAXB Reference Implementation (com.sun.xml.bind:jaxb-impl:2.2.5 - http://jaxb.java.net/)
* JHighlight (com.uwyn:jhighlight:1.0 - https://jhighlight.dev.java.net/)
* JavaBeans(TM) Activation Framework (javax.activation:activation:1.1.1 - http://java.sun.com/javase/technologies/desktop/javabeans/jaf/index.jsp)
* javax.annotation API (javax.annotation:javax.annotation-api:1.2 - http://jcp.org/en/jsr/detail?id=250)
* JavaMail API (compat) (javax.mail:mail:1.4.7 - http://kenai.com/projects/javamail/mail)
* Java Servlet API (javax.servlet:javax.servlet-api:3.1.0 - http://servlet-spec.java.net)
* javax.transaction API (javax.transaction:javax.transaction-api:1.3 - http://jta-spec.java.net)
* jaxb-api (javax.xml.bind:jaxb-api:2.3.1 - https://github.com/javaee/jaxb-spec/jaxb-api)
* JHighlight (org.codelibs:jhighlight:1.0.3 - https://github.com/codelibs/jhighlight)
* HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api)
* ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator)
* HK2 Implementation Utilities (org.glassfish.hk2:hk2-utils:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-utils)
* OSGi resource locator (org.glassfish.hk2:osgi-resource-locator:1.0.3 - https://projects.eclipse.org/projects/ee4j/osgi-resource-locator)
* aopalliance version 1.0 repackaged as a module (org.glassfish.hk2.external:aopalliance-repackaged:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/aopalliance-repackaged)
* javax.inject:1 as OSGi bundle (org.glassfish.hk2.external:jakarta.inject:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/jakarta.inject)
* JAXB Runtime (org.glassfish.jaxb:jaxb-runtime:2.3.1 - http://jaxb.java.net/jaxb-runtime-parent/jaxb-runtime)
* TXW2 Runtime (org.glassfish.jaxb:txw2:2.3.1 - http://jaxb.java.net/jaxb-txw-parent/txw2)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Java Transaction API (org.jboss.spec.javax.transaction:jboss-transaction-api_1.2_spec:1.1.1.Final - http://www.jboss.org/jboss-transaction-api_1.2_spec)
* Extended StAX API (org.jvnet.staxex:stax-ex:1.8 - http://stax-ex.java.net/)

Cordra (Version 2) License Agreement:

* net.cnri:cnri-servlet-container (net.cnri:cnri-servlet-container:3.0.0 - https://gitlab.com/cnri/cnri-servlet-container)
* net.cnri:cnri-servlet-container-lib (net.cnri:cnri-servlet-container-lib:3.0.0 - https://gitlab.com/cnri/cnri-servlet-container)
* net.cnri:cnriutil (net.cnri:cnriutil:2.0 - https://gitlab.com/cnri/cnriutil)

Eclipse Distribution License, Version 1.0:

* Jakarta Activation API jar (jakarta.activation:jakarta.activation-api:1.2.2 - https://github.com/eclipse-ee4j/jaf/jakarta.activation-api)
* Jakarta XML Binding API (jakarta.xml.bind:jakarta.xml.bind-api:2.3.3 - https://github.com/eclipse-ee4j/jaxb-api/jakarta.xml.bind-api)
* javax.persistence-api (javax.persistence:javax.persistence-api:2.2 - https://github.com/javaee/jpa-spec)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Java Persistence API, Version 2.1 (org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.2.Final - http://hibernate.org)
* jsp-api (javax.servlet:jsp-api:2.0 - no url defined)
* jstl (javax.servlet:jstl:1.2 - no url defined)
* servlet-api (javax.servlet:servlet-api:2.5 - no url defined)
* javax.ws.rs-api (javax.ws.rs:javax.ws.rs-api:2.0.1 - http://jax-rs-spec.java.net)
* Class Model for Hk2 (org.glassfish.hk2:class-model:2.4.0-b31 - https://hk2.java.net/class-model)
* HK2 config types (org.glassfish.hk2:config-types:2.4.0-b31 - https://hk2.java.net/hk2-configuration/hk2-configuration-persistence/hk2-xml-dom/config-types)
* HK2 module of HK2 itself (org.glassfish.hk2:hk2:2.4.0-b31 - https://hk2.java.net/hk2)
* HK2 API module (org.glassfish.hk2:hk2-api:2.4.0-b31 - https://hk2.java.net/hk2-api)
* HK2 configuration module (org.glassfish.hk2:hk2-config:2.4.0-b31 - https://hk2.java.net/hk2-configuration/hk2-configuration-persistence/hk2-xml-dom/hk2-config)
* HK2 core module (org.glassfish.hk2:hk2-core:2.4.0-b31 - https://hk2.java.net/hk2-core)
* ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.4.0-b31 - https://hk2.java.net/hk2-locator)
* Run Level Service (org.glassfish.hk2:hk2-runlevel:2.4.0-b31 - https://hk2.java.net/hk2-runlevel)
* HK2 Implementation Utilities (org.glassfish.hk2:hk2-utils:2.4.0-b31 - https://hk2.java.net/hk2-utils)
* OSGi resource locator bundle - used by various API providers that rely on META-INF/services mechanism to locate providers. (org.glassfish.hk2:osgi-resource-locator:1.0.1 - http://glassfish.org/osgi-resource-locator/)
* HK2 Spring Bridge (org.glassfish.hk2:spring-bridge:2.4.0-b31 - https://hk2.java.net/spring-bridge)
* aopalliance version 1.0 repackaged as a module (org.glassfish.hk2.external:aopalliance-repackaged:2.4.0-b31 - https://hk2.java.net/external/aopalliance-repackaged)
* ASM library repackaged as OSGi bundle (org.glassfish.hk2.external:asm-all-repackaged:2.4.0-b31 - https://hk2.java.net/external/asm-all-repackaged)
* javax.validation:1.1.0.Final as OSGi bundle (org.glassfish.hk2.external:bean-validator:2.4.0-b31 - https://hk2.java.net/external/bean-validator)
* javax.inject:1 as OSGi bundle (org.glassfish.hk2.external:javax.inject:2.4.0-b31 - https://hk2.java.net/external/javax.inject)
* jersey-repackaged-guava (org.glassfish.jersey.bundles.repackaged:jersey-guava:2.22.1 - https://jersey.java.net/project/project/jersey-guava/)
* jersey-container-servlet (org.glassfish.jersey.containers:jersey-container-servlet:2.22.1 - https://jersey.java.net/project/jersey-container-servlet/)
* jersey-container-servlet-core (org.glassfish.jersey.containers:jersey-container-servlet-core:2.22.1 - https://jersey.java.net/project/jersey-container-servlet-core/)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.22.1 - https://jersey.java.net/jersey-client/)
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.22.1 - https://jersey.java.net/jersey-common/)
* jersey-core-server (org.glassfish.jersey.core:jersey-server:2.22.1 - https://jersey.java.net/jersey-server/)
* jersey-ext-entity-filtering (org.glassfish.jersey.ext:jersey-entity-filtering:2.22.1 - https://jersey.java.net/project/jersey-entity-filtering/)
* jersey-spring3 (org.glassfish.jersey.ext:jersey-spring3:2.22.1 - https://jersey.java.net/project/jersey-spring3/)
* jersey-media-jaxb (org.glassfish.jersey.media:jersey-media-jaxb:2.22.1 - https://jersey.java.net/project/jersey-media-jaxb/)
* jersey-media-json-jackson (org.glassfish.jersey.media:jersey-media-json-jackson:2.22.1 - https://jersey.java.net/project/jersey-media-json-jackson/)
* Java Transaction API (org.jboss.spec.javax.transaction:jboss-transaction-api_1.1_spec:1.0.1.Final - http://www.jboss.org/jboss-transaction-api_1.1_spec)
* Type arithmetic library for Java5 (org.jvnet:tiger-types:1.4 - http://java.net/tiger-types/)

Eclipse Public License:

* System Rules (com.github.stefanbirkner:system-rules:1.19.0 - http://stefanbirkner.github.io/system-rules/)
* H2 Database Engine (com.h2database:h2:2.1.210 - https://h2database.com)
* Jakarta Annotations API (jakarta.annotation:jakarta.annotation-api:1.3.5 - https://projects.eclipse.org/projects/ee4j.ca)
* jakarta.ws.rs-api (jakarta.ws.rs:jakarta.ws.rs-api:2.1.6 - https://github.com/eclipse-ee4j/jaxrs-api)
* javax.persistence-api (javax.persistence:javax.persistence-api:2.2 - https://github.com/javaee/jpa-spec)
* JUnit (junit:junit:4.13.1 - http://junit.org)
* AspectJ Weaver (org.aspectj:aspectjweaver:1.9.7 - https://www.eclipse.org/aspectj/)
* Eclipse Compiler for Java(TM) (org.eclipse.jdt:ecj:3.14.0 - http://www.eclipse.org/jdt)
* Jetty :: Apache JSP Implementation (org.eclipse.jetty:apache-jsp:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/)
* Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client)
* Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client)
* Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server)
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
* Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-client)
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-continuation)
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-continuation)
* Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-deploy)
* Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-http)
* Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-io)
* Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-jmx)
* Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-rewrite)
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-security)
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-security)
* Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-server)
* Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlet)
* Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlets)
* Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util)
* Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util-ajax)
* Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-webapp)
* Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-xml)
* Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-client)
* Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-common)
* Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-hpack)
* Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport)
* Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-server)
* Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas)
* HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api)
* ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator)
* HK2 Implementation Utilities (org.glassfish.hk2:hk2-utils:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-utils)
* OSGi resource locator (org.glassfish.hk2:osgi-resource-locator:1.0.3 - https://projects.eclipse.org/projects/ee4j/osgi-resource-locator)
* aopalliance version 1.0 repackaged as a module (org.glassfish.hk2.external:aopalliance-repackaged:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/aopalliance-repackaged)
* javax.inject:1 as OSGi bundle (org.glassfish.hk2.external:jakarta.inject:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/jakarta.inject)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Java Persistence API, Version 2.1 (org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.2.Final - http://hibernate.org)
* JUnit (junit:junit:4.11 - http://junit.org)
* AspectJ runtime (org.aspectj:aspectjrt:1.6.11 - http://www.aspectj.org)
* JPA 2.0 API (org.hibernate.javax.persistence:hibernate-jpa-2.0-api:1.0.1.Final - http://hibernate.org)
* Jetty Server (org.mortbay.jetty:jetty:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/modules/jetty)
* Jetty Servlet Tester (org.mortbay.jetty:jetty-servlet-tester:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-servlet-tester)
* Jetty Utilities (org.mortbay.jetty:jetty-util:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-util)

GNU General Public License, Version 2 with the Classpath Exception:

* Java Transaction API (org.jboss.spec.javax.transaction:jboss-transaction-api_1.1_spec:1.0.1.Final - http://www.jboss.org/jboss-transaction-api_1.1_spec)

GNU Lesser General Public License (LGPL):

* btf (com.github.java-json-tools:btf:1.3 - https://github.com/java-json-tools/btf)
* jackson-coreutils (com.github.java-json-tools:jackson-coreutils:2.0 - https://github.com/java-json-tools/jackson-coreutils)
* jackson-coreutils-equivalence (com.github.java-json-tools:jackson-coreutils-equivalence:1.0 - https://github.com/java-json-tools/jackson-coreutils)
* json-schema-core (com.github.java-json-tools:json-schema-core:1.2.14 - https://github.com/java-json-tools/json-schema-core)
* json-schema-validator (com.github.java-json-tools:json-schema-validator:2.2.14 - https://github.com/java-json-tools/json-schema-validator)
* msg-simple (com.github.java-json-tools:msg-simple:1.2 - https://github.com/java-json-tools/msg-simple)
* uri-template (com.github.java-json-tools:uri-template:0.10 - https://github.com/java-json-tools/uri-template)
* FindBugs-Annotations (com.google.code.findbugs:annotations:3.0.1u2 - http://findbugs.sourceforge.net/)
* JHighlight (org.codelibs:jhighlight:1.0.3 - https://github.com/codelibs/jhighlight)
* Hibernate ORM - hibernate-core (org.hibernate:hibernate-core:5.6.5.Final - https://hibernate.org/orm)
* Hibernate ORM - hibernate-jcache (org.hibernate:hibernate-jcache:5.6.5.Final - https://hibernate.org/orm)
* Hibernate ORM - hibernate-jpamodelgen (org.hibernate:hibernate-jpamodelgen:5.6.5.Final - https://hibernate.org/orm)
* Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:5.1.2.Final - http://hibernate.org)
* MaxMind GeoIP Legacy API (com.maxmind.geoip:geoip-api:1.3.0 - https://github.com/maxmind/geoip-api-java)
* JHighlight (com.uwyn:jhighlight:1.0 - https://jhighlight.dev.java.net/)
* DSpace TM-Extractors Dependency (org.dspace.dependencies:dspace-tm-extractors:1.0.1 - http://projects.dspace.org/dspace-pom/dspace-tm-extractors)
* A Hibernate O/RM Module (org.hibernate:hibernate-core:4.2.21.Final - http://hibernate.org)
* A Hibernate O/RM Module (org.hibernate:hibernate-ehcache:4.2.21.Final - http://hibernate.org)
* Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:4.0.2.Final - http://hibernate.org)
* im4java (org.im4java:im4java:1.4.0 - http://sourceforge.net/projects/im4java/)
* Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/)
* Javassist (org.javassist:javassist:3.18.1-GA - http://www.javassist.org/)
* JBoss Logging 3 (org.jboss.logging:jboss-logging:3.1.0.GA - http://www.jboss.org)
* org.jdesktop - Swing Worker (org.jdesktop:swing-worker:1.1 - no url defined)
* xom (xom:xom:1.1 - http://www.xom.nu)
* XOM (xom:xom:1.2.5 - http://xom.nu)
* XOM (xom:xom:1.3.7 - https://xom.nu)

Go License:
ICU License:

* RE2/J (com.google.re2j:re2j:1.2 - http://github.com/google/re2j)
* ICU4J (com.ibm.icu:icu4j:56.1 - http://icu-project.org/)

Handle.Net Public License Agreement (Ver.2):
JDOM License (Apache-style license):

* Handle Server (net.handle:handle:9.3.0 - https://www.handle.net)
* jdom (jdom:jdom:1.0 - no url defined)

MIT License:

* Java SemVer (com.github.zafarkhaja:java-semver:0.9.0 - https://github.com/zafarkhaja/jsemver)
* dd-plist (com.googlecode.plist:dd-plist:1.23 - http://www.github.com/3breadt/dd-plist)
* DigitalCollections: IIIF API Library (de.digitalcollections.iiif:iiif-apis:0.3.9 - https://github.com/dbmdz/iiif-apis)
* JOpt Simple (net.sf.jopt-simple:jopt-simple:5.0.4 - http://jopt-simple.github.io/jopt-simple)
* Bouncy Castle S/MIME API (org.bouncycastle:bcmail-jdk15on:1.70 - https://www.bouncycastle.org/java.html)
* Bouncy Castle PKIX, CMS, EAC, TSP, PKCS, OCSP, CMP, and CRMF APIs (org.bouncycastle:bcpkix-jdk15on:1.70 - https://www.bouncycastle.org/java.html)
* Bouncy Castle Provider (org.bouncycastle:bcprov-jdk15on:1.70 - https://www.bouncycastle.org/java.html)
* Bouncy Castle ASN.1 Extension and Utility APIs (org.bouncycastle:bcutil-jdk15on:1.70 - https://www.bouncycastle.org/java.html)
* org.brotli:dec (org.brotli:dec:0.1.2 - http://brotli.org/dec)
* Checker Qual (org.checkerframework:checker-qual:3.10.0 - https://checkerframework.org)
* Checker Qual (org.checkerframework:checker-qual:3.5.0 - https://checkerframework.org)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* mockito-core (org.mockito:mockito-core:3.12.4 - https://github.com/mockito/mockito)
* mockito-inline (org.mockito:mockito-inline:3.12.4 - https://github.com/mockito/mockito)
* ORCID - Model (org.orcid:orcid-model:3.0.2 - http://github.com/ORCID/orcid-model)
* JCL 1.2 implemented over SLF4J (org.slf4j:jcl-over-slf4j:1.7.25 - http://www.slf4j.org)
* JUL to SLF4J bridge (org.slf4j:jul-to-slf4j:1.7.25 - http://www.slf4j.org)
* SLF4J API Module (org.slf4j:slf4j-api:1.7.25 - http://www.slf4j.org)
* SLF4J Extensions Module (org.slf4j:slf4j-ext:1.7.28 - http://www.slf4j.org)
* HAL Browser (org.webjars:hal-browser:ad9b865 - http://webjars.org)
* toastr (org.webjars.bowergithub.codeseven:toastr:2.1.4 - http://webjars.org)
* backbone (org.webjars.bowergithub.jashkenas:backbone:1.4.1 - https://www.webjars.org)
* underscore (org.webjars.bowergithub.jashkenas:underscore:1.13.2 - https://www.webjars.org)
* jquery (org.webjars.bowergithub.jquery:jquery-dist:3.6.0 - https://www.webjars.org)
* urijs (org.webjars.bowergithub.medialize:uri.js:1.19.10 - https://www.webjars.org)
* bootstrap (org.webjars.bowergithub.twbs:bootstrap:4.6.1 - https://www.webjars.org)
* core-js (org.webjars.npm:core-js:3.25.2 - https://www.webjars.org)
* @json-editor/json-editor (org.webjars.npm:json-editor__json-editor:2.6.1 - https://www.webjars.org)
* Bouncy Castle CMS and S/MIME API (org.bouncycastle:bcmail-jdk15:1.46 - http://www.bouncycastle.org/java.html)
* Bouncy Castle Provider (org.bouncycastle:bcprov-jdk15:1.46 - http://www.bouncycastle.org/java.html)
* Main (org.jmockit:jmockit:1.21 - http://www.jmockit.org)
* OpenCloud (org.mcavallo:opencloud:0.3 - http://opencloud.mcavallo.org/)
* Mockito (org.mockito:mockito-core:1.10.19 - http://www.mockito.org)
* JCL 1.1.1 implemented over SLF4J (org.slf4j:jcl-over-slf4j:1.7.14 - http://www.slf4j.org)
* JUL to SLF4J bridge (org.slf4j:jul-to-slf4j:1.7.14 - http://www.slf4j.org)
* SLF4J API Module (org.slf4j:slf4j-api:1.7.14 - http://www.slf4j.org)
* SLF4J LOG4J-12 Binding (org.slf4j:slf4j-log4j12:1.7.14 - http://www.slf4j.org)

Mozilla Public License:

* juniversalchardet (com.googlecode.juniversalchardet:juniversalchardet:1.0.3 - http://juniversalchardet.googlecode.com/)
* H2 Database Engine (com.h2database:h2:2.1.210 - https://h2database.com)
* Saxon-HE (net.sf.saxon:Saxon-HE:9.8.0-14 - http://www.saxonica.com/)
* Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/)
* Mozilla Rhino (org.mozilla:rhino:1.7.7.2 - https://developer.mozilla.org/en/Rhino)
* h2 (com.h2database:h2:1.4.187 - no url defined)
* Javassist (org.javassist:javassist:3.18.1-GA - http://www.javassist.org/)
* Rhino (rhino:js:1.6R7 - http://www.mozilla.org/rhino/)

Public Domain:

* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* HdrHistogram (org.hdrhistogram:HdrHistogram:2.1.12 - http://hdrhistogram.github.io/HdrHistogram/)
* LatencyUtils (org.latencyutils:LatencyUtils:2.0.3 - http://latencyutils.github.io/LatencyUtils/)
* Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections)
* XZ for Java (org.tukaani:xz:1.9 - https://tukaani.org/xz/java.html)
* AOP alliance (aopalliance:aopalliance:1.0 - http://aopalliance.sourceforge.net)
* Dough Lea's util.concurrent package (concurrent:concurrent:1.3.4 - no url defined)
* Reflections (org.reflections:reflections:0.9.9-RC1 - http://code.google.com/p/reflections/reflections/)
* XZ for Java (org.tukaani:xz:1.4 - http://tukaani.org/xz/java.html)

The JSON License:
Similar to Apache License but with the acknowledgment clause removed:

* JSON in Java (org.json:json:20180130 - https://github.com/douglascrockford/JSON-java)
* JDOM (org.jdom:jdom:1.1.3 - http://www.jdom.org)

UnRar License:
The PostgreSQL License:

* Java Unrar (com.github.junrar:junrar:7.4.1 - https://github.com/junrar/junrar)
* PostgreSQL JDBC Driver - JDBC 4.2 (org.postgresql:postgresql:9.4.1211 - https://github.com/pgjdbc/pgjdbc)

Unicode/ICU License:
license.txt:

* ICU4J (com.ibm.icu:icu4j:62.1 - http://icu-project.org/)

W3C license:

* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)

jQuery license:

* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* JPA 2.0 API (org.hibernate.javax.persistence:hibernate-jpa-2.0-api:1.0.1.Final - http://hibernate.org)

31
NOTICE
@@ -1,28 +1,15 @@
Licenses of Third-Party Libraries
=================================

DSpace uses third-party libraries which may be distributed under
different licenses than specified in our LICENSE file. Information
about these licenses is detailed in the LICENSES_THIRD_PARTY file at
the root of the source tree. You must agree to the terms of these
licenses, in addition to the DSpace source code license, in order to
use this software.
Licensing Notice

Licensing Notices
=================

[July 2019] DuraSpace joined with LYRASIS (another 501(c)3 organization) in July 2019.
LYRASIS holds the copyrights of DuraSpace.

[July 2009] Fedora Commons joined with the DSpace Foundation and began operating under
Fedora Commons joined with the DSpace Foundation and began operating under
the new name DuraSpace in July 2009. DuraSpace holds the copyrights of
the DSpace Foundation, Inc.

[July 2007] The DSpace Foundation, Inc. is a 501(c)3 corporation established in July 2007
with a mission to promote and advance the dspace platform enabling management,
access and preservation of digital works. The Foundation was able to transfer
the legal copyright from Hewlett-Packard Company (HP) and Massachusetts
Institute of Technology (MIT) to the DSpace Foundation in October 2007. Many
of the files in the source code may contain a copyright statement stating HP
and MIT possess the copyright, in these instances please note that the copy
The DSpace Foundation, Inc. is a 501(c)3 corporation established in July 2007
with a mission to promote and advance the dspace platform enabling management,
access and preservation of digital works. The Foundation was able to transfer
the legal copyright from Hewlett-Packard Company (HP) and Massachusetts
Institute of Technology (MIT) to the DSpace Foundation in October 2007. Many
of the files in the source code may contain a copyright statement stating HP
and MIT possess the copyright, in these instances please note that the copy
right has transferred to the DSpace foundation, and subsequently to DuraSpace.
126
README.md
@@ -1,65 +1,62 @@
# DSpace

[![Build Status](https://github.com/DSpace/DSpace/workflows/Build/badge.svg)](https://github.com/DSpace/DSpace/actions?query=workflow%3ABuild)
## NOTE: The rest-tutorial branch has been created to support the [DSpace 7 REST documentation](https://dspace-labs.github.io/DSpace7RestTutorial/walkthrough/intro)
- This branch provides stable, referencable line numbers in code

[DSpace Documentation](https://wiki.lyrasis.org/display/DSDOC/) |
[![Build Status](https://travis-ci.org/DSpace/DSpace.png?branch=master)](https://travis-ci.org/DSpace/DSpace)

[DSpace Documentation](https://wiki.duraspace.org/display/DSDOC/) |
[DSpace Releases](https://github.com/DSpace/DSpace/releases) |
[DSpace Wiki](https://wiki.lyrasis.org/display/DSPACE/Home) |
[Support](https://wiki.lyrasis.org/display/DSPACE/Support)
[DSpace Wiki](https://wiki.duraspace.org/display/DSPACE/Home) |
[Support](https://wiki.duraspace.org/display/DSPACE/Support)

## Overview

DSpace open source software is a turnkey repository application used by more than
DSpace open source software is a turnkey repository application used by more than
2,000 organizations and institutions worldwide to provide durable access to digital resources.
For more information, visit http://www.dspace.org/

DSpace consists of both a Java-based backend and an Angular-based frontend.
***
:warning: **Work on DSpace 7 has begun on our `master` branch.** This means that there is temporarily NO user interface on this `master` branch. DSpace 7 will feature a new, unified [Angular](https://angular.io/) user interface, along with an enhanced, rebuilt REST API. The latest status of this work can be found on the [DSpace 7 UI Working Group](https://wiki.duraspace.org/display/DSPACE/DSpace+7+UI+Working+Group) page. Additionally, the codebases can be found in the following places:
* DSpace 7 REST API work is occurring on the [`master` branch](https://github.com/DSpace/DSpace/tree/master/dspace-spring-rest) of this repository.
* The REST Contract is being documented at https://github.com/DSpace/Rest7Contract
* DSpace 7 Angular UI work is occurring at https://github.com/DSpace/dspace-angular

**If you would like to get involved in our DSpace 7 development effort, we welcome new contributors.** Just join one of our meetings or get in touch via Slack. See the [DSpace 7 UI Working Group](https://wiki.duraspace.org/display/DSPACE/DSpace+7+UI+Working+Group) wiki page for more info.

* Backend (this codebase) provides a REST API, along with other machine-based interfaces (e.g. OAI-PMH, SWORD, etc)
* The REST Contract is at https://github.com/DSpace/RestContract
* Frontend (https://github.com/DSpace/dspace-angular/) is the User Interface built on the REST API

Prior versions of DSpace (v6.x and below) used two different UIs (XMLUI and JSPUI). Those UIs are no longer supported in v7 (and above).
* A maintenance branch for older versions is still available, see `dspace-6_x` for 6.x maintenance.
**If you are looking for the ongoing maintenance work for DSpace 6 (or prior releases)**, you can find that work on the corresponding maintenance branch (e.g. [`dspace-6_x`](https://github.com/DSpace/DSpace/tree/dspace-6_x)) in this repository.
***

## Downloads

* Backend (REST API): https://github.com/DSpace/DSpace/releases
* Frontend (User Interface): https://github.com/DSpace/dspace-angular/releases
The latest release of DSpace can be downloaded from the [DSpace website](http://www.dspace.org/latest-release/) or from [GitHub](https://github.com/DSpace/DSpace/releases).

Past releases are all available via GitHub at https://github.com/DSpace/DSpace/releases

## Documentation / Installation

Documentation for each release may be viewed online or downloaded via our [Documentation Wiki](https://wiki.lyrasis.org/display/DSDOC/).
Documentation for each release may be viewed online or downloaded via our [Documentation Wiki](https://wiki.duraspace.org/display/DSDOC/).

The latest DSpace Installation instructions are available at:
https://wiki.lyrasis.org/display/DSDOC7x/Installing+DSpace
https://wiki.duraspace.org/display/DSDOC6x/Installing+DSpace

Please be aware that, as a Java web application, DSpace requires a database (PostgreSQL)
Please be aware that, as a Java web application, DSpace requires a database (PostgreSQL or Oracle)
and a servlet container (usually Tomcat) in order to function.
More information about these and all other prerequisites can be found in the Installation instructions above.

## Running DSpace 7 in Docker

NOTE: At this time, we do not have production-ready Docker images for DSpace.
That said, we do have quick-start Docker Compose scripts for development or testing purposes.

See [Running DSpace 7 with Docker Compose](dspace/src/main/docker-compose/README.md)

## Contributing

DSpace is a community built and supported project. We do not have a centralized development or support team,
DSpace is a community built and supported project. We do not have a centralized development or support team,
but have a dedicated group of volunteers who help us improve the software, documentation, resources, etc.

We welcome contributions of any type. Here's a few basic guides that provide suggestions for contributing to DSpace:
* [How to Contribute to DSpace](https://wiki.lyrasis.org/display/DSPACE/How+to+Contribute+to+DSpace): How to contribute in general (via code, documentation, bug reports, expertise, etc)
* [Code Contribution Guidelines](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines): How to give back code or contribute features, bug fixes, etc.
* [DSpace Community Advisory Team (DCAT)](https://wiki.lyrasis.org/display/cmtygp/DSpace+Community+Advisory+Team): If you are not a developer, we also have an interest group specifically for repository managers. The DCAT group meets virtually, once a month, and sends open invitations to join their meetings via the [DCAT mailing list](https://groups.google.com/d/forum/DSpaceCommunityAdvisoryTeam).
* [How to Contribute to DSpace](https://wiki.duraspace.org/display/DSPACE/How+to+Contribute+to+DSpace): How to contribute in general (via code, documentation, bug reports, expertise, etc)
* [Code Contribution Guidelines](https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines): How to give back code or contribute features, bug fixes, etc.
* [DSpace Community Advisory Team (DCAT)](https://wiki.duraspace.org/display/cmtygp/DSpace+Community+Advisory+Team): If you are not a developer, we also have an interest group specifically for repository managers. The DCAT group meets virtually, once a month, and sends open invitations to join their meetings via the [DCAT mailing list](https://groups.google.com/d/forum/DSpaceCommunityAdvisoryTeam).

We also encourage GitHub Pull Requests (PRs) at any time. Please see our [Development with Git](https://wiki.lyrasis.org/display/DSPACE/Development+with+Git) guide for more info.
We also encourage GitHub Pull Requests (PRs) at any time. Please see our [Development with Git](https://wiki.duraspace.org/display/DSPACE/Development+with+Git) guide for more info.

In addition, a listing of all known contributors to DSpace software can be
found online at: https://wiki.lyrasis.org/display/DSPACE/DSpaceContributors
found online at: https://wiki.duraspace.org/display/DSPACE/DSpaceContributors

## Getting Help

@@ -67,75 +64,22 @@ DSpace provides public mailing lists where you can post questions or raise topic
We welcome everyone to participate in these lists:

* [dspace-community@googlegroups.com](https://groups.google.com/d/forum/dspace-community) : General discussion about DSpace platform, announcements, sharing of best practices
* [dspace-tech@googlegroups.com](https://groups.google.com/d/forum/dspace-tech) : Technical support mailing list. See also our guide for [How to troubleshoot an error](https://wiki.lyrasis.org/display/DSPACE/Troubleshoot+an+error).
* [dspace-tech@googlegroups.com](https://groups.google.com/d/forum/dspace-tech) : Technical support mailing list. See also our guide for [How to troubleshoot an error](https://wiki.duraspace.org/display/DSPACE/Troubleshoot+an+error).
* [dspace-devel@googlegroups.com](https://groups.google.com/d/forum/dspace-devel) : Developers / Development mailing list

Great Q&A is also available under the [DSpace tag on Stackoverflow](http://stackoverflow.com/questions/tagged/dspace)

Additional support options are at https://wiki.lyrasis.org/display/DSPACE/Support
Additional support options are listed at https://wiki.duraspace.org/display/DSPACE/Support

DSpace also has an active service provider network. If you'd rather hire a service provider to
install, upgrade, customize or host DSpace, then we recommend getting in touch with one of our
DSpace also has an active service provider network. If you'd rather hire a service provider to
install, upgrade, customize or host DSpace, then we recommend getting in touch with one of our
[Registered Service Providers](http://www.dspace.org/service-providers).

## Issue Tracker

DSpace uses GitHub to track issues:
* Backend (REST API) issues: https://github.com/DSpace/DSpace/issues
* Frontend (User Interface) issues: https://github.com/DSpace/dspace-angular/issues

## Testing

### Running Tests

By default, in DSpace, Unit Tests and Integration Tests are disabled. However, they are
run automatically by [GitHub Actions](https://github.com/DSpace/DSpace/actions?query=workflow%3ABuild) for all Pull Requests and code commits.

* How to run both Unit Tests (via `maven-surefire-plugin`) and Integration Tests (via `maven-failsafe-plugin`):
  ```
  mvn install -DskipUnitTests=false -DskipIntegrationTests=false
  ```
* How to run _only_ Unit Tests:
  ```
  mvn test -DskipUnitTests=false
  ```
* How to run a *single* Unit Test
  ```
  # Run all tests in a specific test class
  # NOTE: failIfNoTests=false is required to skip tests in other modules
  mvn test -DskipUnitTests=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false

  # Run one test method in a specific test class
  mvn test -DskipUnitTests=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
  ```
* How to run _only_ Integration Tests
  ```
  mvn install -DskipIntegrationTests=false
  ```
* How to run a *single* Integration Test
  ```
  # Run all integration tests in a specific test class
  # NOTE: failIfNoTests=false is required to skip tests in other modules
  mvn install -DskipIntegrationTests=false -Dit.test=[full.package.testClassName] -DfailIfNoTests=false

  # Run one test method in a specific test class
  mvn install -DskipIntegrationTests=false -Dit.test=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
  ```
* How to run only tests of a specific DSpace module
  ```
  # Before you can run only one module's tests, other modules may need installing into your ~/.m2
  cd [dspace-src]
  mvn clean install

  # Then, move into a module subdirectory, and run the test command
  cd [dspace-src]/dspace-server-webapp
  # Choose your test command from the lists above
  ```
The DSpace Issue Tracker can be found at: https://jira.duraspace.org/projects/DS/summary

## License

DSpace source code is freely available under a standard [BSD 3-Clause license](https://opensource.org/licenses/BSD-3-Clause).
The full license is available in the [LICENSE](LICENSE) file or online at http://www.dspace.org/license/

DSpace uses third-party libraries which may be distributed under different licenses. Those licenses are listed
in the [LICENSES_THIRD_PARTY](LICENSES_THIRD_PARTY) file.
The full license is available at http://www.dspace.org/license/
15
SECURITY.md
@@ -1,15 +0,0 @@
# Security Policy

## Supported Versions

For information regarding which versions of DSpace are currently under support, please see our DSpace Software Support Policy:

https://wiki.lyrasis.org/display/DSPACE/DSpace+Software+Support+Policy

## Reporting a Vulnerability

If you believe you have found a security vulnerability in a supported version of DSpace, we encourage you to let us know right away.
We will investigate all legitimate reports and do our best to quickly fix the problem. Please see our DSpace Software Support Policy
for information on privately reporting vulnerabilities:

https://wiki.lyrasis.org/display/DSPACE/DSpace+Software+Support+Policy
@@ -44,16 +44,15 @@ For more information on CheckStyle configurations below, see: http://checkstyle.
         with @SuppressWarnings. See also SuppressWarningsHolder below -->
    <module name="SuppressWarningsFilter" />

    <!-- Maximum line length is 120 characters -->
    <module name="LineLength">
        <property name="fileExtensions" value="java"/>
        <property name="max" value="120"/>
        <!-- Only exceptions for packages, imports, URLs, and JavaDoc {@link} tags -->
        <property name="ignorePattern" value="^package.*|^import.*|http://|https://|@link"/>
    </module>

    <!-- Check individual Java source files for specific rules -->
    <module name="TreeWalker">
        <!-- Maximum line length is 120 characters -->
        <module name="LineLength">
            <property name="max" value="120"/>
            <!-- Only exceptions for packages, imports, URLs, and JavaDoc {@link} tags -->
            <property name="ignorePattern" value="^package.*|^import.*|http://|https://|@link"/>
        </module>

        <!-- Highlight any TODO or FIXME comments in info messages -->
        <module name="TodoComment">
            <property name="severity" value="info"/>
@@ -95,8 +94,11 @@ For more information on CheckStyle configurations below, see: http://checkstyle.
            <!-- <property name="scope" value="public"/> -->
            <!-- TODO: Above rule has been disabled because of large amount of missing public method Javadocs -->
            <property name="scope" value="nothing"/>
            <!-- Allow RuntimeExceptions to be undeclared -->
            <property name="allowUndeclaredRTE" value="true"/>
            <!-- Allow params, throws and return tags to be optional -->
            <property name="allowMissingParamTags" value="true"/>
            <property name="allowMissingThrowsTags" value="true"/>
            <property name="allowMissingReturnTag" value="true"/>
        </module>
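For instance, the `ignorePattern` above exempts package statements, import statements, and URLs from the 120-character limit, since those cannot sensibly be wrapped. An illustrative snippet (not from the DSpace codebase):

```
/**
 * Illustrative only: the link below may exceed 120 characters without triggering
 * LineLength, because lines containing "http://" or "https://" match the
 * ignorePattern configured above; package and import lines are exempt the same way.
 * See https://example.org/a/very/long/reference/url/that/would/otherwise/exceed/the/configured/maximum/line/length/of/120/characters
 */
class LineLengthDemo {
}
```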
@@ -1,38 +0,0 @@
version: "3.7"

services:
  dspace-cli:
    image: "${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-dspace-7_x}"
    container_name: dspace-cli
    build:
      context: .
      dockerfile: Dockerfile.cli
    environment:
      # Below syntax may look odd, but it is how to override dspace.cfg settings via env variables.
      # See https://github.com/DSpace/DSpace/blob/main/dspace/config/config-definition.xml
      # __P__ => "." (e.g. dspace__P__dir => dspace.dir)
      # __D__ => "-" (e.g. google__D__metadata => google-metadata)
      # dspace.dir: Must match with Dockerfile's DSPACE_INSTALL directory.
      dspace__P__dir: /dspace
      # db.url: Ensure we are using the 'dspacedb' image for our database
      db__P__url: 'jdbc:postgresql://dspacedb:5432/dspace'
      # solr.server: Ensure we are using the 'dspacesolr' image for Solr
      solr__P__server: http://dspacesolr:8983/solr
    volumes:
      # Keep DSpace assetstore directory between reboots
      - assetstore:/dspace/assetstore
      # Mount local [src]/dspace/config/ to container. This syncs your local configs with container
      # NOTE: Environment variables specified above will OVERRIDE any configs in local.cfg or dspace.cfg
      - ./dspace/config:/dspace/config
    entrypoint: /dspace/bin/dspace
    command: help
    networks:
      - dspacenet
    tty: true
    stdin_open: true

volumes:
  assetstore:

networks:
  dspacenet:
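Note: the `__P__` / `__D__` tokens in the `environment:` blocks above are a plain character substitution applied when DSpace reads environment variables; they are not a Docker Compose feature. A minimal sketch of the translation (the helper class below is hypothetical, for illustration only):

```
// Hypothetical helper, not part of DSpace: shows how a Docker-style environment
// variable name maps onto a dspace.cfg property key under the convention above.
public class EnvVarNameDemo {
    static String toPropertyKey(String envVarName) {
        return envVarName.replace("__P__", ".").replace("__D__", "-");
    }

    public static void main(String[] args) {
        System.out.println(toPropertyKey("dspace__P__dir"));      // dspace.dir
        System.out.println(toPropertyKey("db__P__url"));          // db.url
        System.out.println(toPropertyKey("google__D__metadata")); // google-metadata
    }
}
```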
@@ -1,121 +0,0 @@
|
||||
version: '3.7'
|
||||
networks:
|
||||
dspacenet:
|
||||
ipam:
|
||||
config:
|
||||
# Define a custom subnet for our DSpace network, so that we can easily trust requests from host to container.
|
||||
# If you customize this value, be sure to customize the 'proxies.trusted.ipranges' env variable below.
|
||||
- subnet: 172.23.0.0/16
|
||||
services:
|
||||
# DSpace (backend) webapp container
|
||||
dspace:
|
||||
container_name: dspace
|
||||
environment:
|
||||
# Below syntax may look odd, but it is how to override dspace.cfg settings via env variables.
|
||||
# See https://github.com/DSpace/DSpace/blob/main/dspace/config/config-definition.xml
|
||||
# __P__ => "." (e.g. dspace__P__dir => dspace.dir)
|
||||
# __D__ => "-" (e.g. google__D__metadata => google-metadata)
|
      # dspace.dir: Must match with Dockerfile's DSPACE_INSTALL directory.
      dspace__P__dir: /dspace
      # Uncomment to set a non-default value for dspace.server.url or dspace.ui.url
      # dspace__P__server__P__url: http://localhost:8080/server
      # dspace__P__ui__P__url: http://localhost:4000
      dspace__P__name: 'DSpace Started with Docker Compose'
      # db.url: Ensure we are using the 'dspacedb' image for our database
      db__P__url: 'jdbc:postgresql://dspacedb:5432/dspace'
      # solr.server: Ensure we are using the 'dspacesolr' image for Solr
      solr__P__server: http://dspacesolr:8983/solr
      # proxies.trusted.ipranges: This setting is required for a REST API running in Docker to trust requests
      # from the host machine. This IP range MUST correspond to the 'dspacenet' subnet defined above.
      proxies__P__trusted__P__ipranges: '172.23.0'
    image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-dspace-7_x-test}"
    build:
      context: .
      dockerfile: Dockerfile.test
    depends_on:
    - dspacedb
    networks:
      dspacenet:
    ports:
    - published: 8080
      target: 8080
    - published: 8009
      target: 8009
    stdin_open: true
    tty: true
    volumes:
    # Keep DSpace assetstore directory between reboots
    - assetstore:/dspace/assetstore
    # Mount local [src]/dspace/config/ to container. This syncs your local configs with container
    # NOTE: Environment variables specified above will OVERRIDE any configs in local.cfg or dspace.cfg
    - ./dspace/config:/dspace/config
    # Ensure that the database is ready BEFORE starting tomcat
    # 1. While a TCP connection to dspacedb port 5432 is not available, continue to sleep
    # 2. Then, run database migration to init database tables
    # 3. Finally, start Tomcat
    entrypoint:
    - /bin/bash
    - '-c'
    - |
      while (!</dev/tcp/dspacedb/5432) > /dev/null 2>&1; do sleep 1; done;
      /dspace/bin/dspace database migrate
      catalina.sh run
  # DSpace database container
  dspacedb:
    container_name: dspacedb
    environment:
      PGDATA: /pgdata
    # Uses a custom Postgres image with pgcrypto installed
    image: dspace/dspace-postgres-pgcrypto
    networks:
      dspacenet:
    ports:
    - published: 5432
      target: 5432
    stdin_open: true
    tty: true
    volumes:
    - pgdata:/pgdata
  # DSpace Solr container
  dspacesolr:
    container_name: dspacesolr
    # Uses official Solr image at https://hub.docker.com/_/solr/
    image: solr:8.11-slim
    networks:
      dspacenet:
    ports:
    - published: 8983
      target: 8983
    stdin_open: true
    tty: true
    working_dir: /var/solr/data
    volumes:
    # Mount our local Solr core configs so that they are available as Solr configsets on container
    - ./dspace/solr/authority:/opt/solr/server/solr/configsets/authority
    - ./dspace/solr/oai:/opt/solr/server/solr/configsets/oai
    - ./dspace/solr/search:/opt/solr/server/solr/configsets/search
    - ./dspace/solr/statistics:/opt/solr/server/solr/configsets/statistics
    # Keep Solr data directory between reboots
    - solr_data:/var/solr/data
    # Initialize all DSpace Solr cores using the mounted local configsets (see above), then start Solr
    # * First, run precreate-core to create the core (if it doesn't yet exist). If exists already, this is a no-op
    # * Second, copy updated configs from mounted configsets to this core. If it already existed, this updates core
    #   to the latest configs. If it's a newly created core, this is a no-op.
    entrypoint:
    - /bin/bash
    - '-c'
    - |
      init-var-solr
      precreate-core authority /opt/solr/server/solr/configsets/authority
      cp -r -u /opt/solr/server/solr/configsets/authority/* authority
      precreate-core oai /opt/solr/server/solr/configsets/oai
      cp -r -u /opt/solr/server/solr/configsets/oai/* oai
      precreate-core search /opt/solr/server/solr/configsets/search
      cp -r -u /opt/solr/server/solr/configsets/search/* search
      precreate-core statistics /opt/solr/server/solr/configsets/statistics
      cp -r -u /opt/solr/server/solr/configsets/statistics/* statistics
      exec solr -f

volumes:
  assetstore:
  pgdata:
  solr_data:
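The "__P__" markers in the variable names above are DSpace's Docker convention for encoding a period in an environment variable name (for example, dspace__P__dir overrides dspace.cfg's dspace.dir, as the first comment notes). As an illustration only, a minimal Java sketch of that substitution; the helper name is hypothetical, and DSpace performs this translation internally in its configuration layer:

// Illustration: translate a Docker-style environment variable name back to a DSpace config key.
// "__P__" stands in for "." (periods are not portable in environment variable names).
public class EnvKeyExample {
    static String toConfigKey(String envName) {
        return envName.replace("__P__", ".");
    }

    public static void main(String[] args) {
        System.out.println(toConfigKey("dspace__P__dir"));                   // dspace.dir
        System.out.println(toConfigKey("proxies__P__trusted__P__ipranges")); // proxies.trusted.ipranges
    }
}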
File diff suppressed because it is too large
@@ -0,0 +1,163 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.handler.extraction;


/**
 * The various Solr parameter names to use when extracting content.
 **/
public interface ExtractingParams {

    /**
     * Map all generated attribute names to field names with lowercase and underscores.
     */
    public static final String LOWERNAMES = "lowernames";

    /**
     * If true, ignore TikaException (give up extracting text, but still index the metadata).
     */
    public static final String IGNORE_TIKA_EXCEPTION = "ignoreTikaException";


    /**
     * The param prefix for mapping Tika metadata to Solr fields.
     * <p>
     * To map a field, add a name like:
     * <pre>fmap.title=solr.title</pre>
     *
     * In this example, the Tika "title" metadata value will be added to a Solr field named "solr.title"
     */
    public static final String MAP_PREFIX = "fmap.";

    /**
     * The boost value for the name of the field. The boost can be specified by a name mapping.
     * <p>
     * For example
     * <pre>
     * map.title=solr.title
     * boost.solr.title=2.5
     * </pre>
     * will boost the solr.title field for this document by 2.5
     */
    public static final String BOOST_PREFIX = "boost.";

    /**
     * Pass in literal values to be added to the document, as in
     * <pre>
     * literal.myField=Foo
     * </pre>
     */
    public static final String LITERALS_PREFIX = "literal.";


    /**
     * Restrict the extracted parts of a document to be indexed
     * by passing in an XPath expression. All content that satisfies the XPath expr.
     * will be passed to the {@link org.apache.solr.handler.extraction.SolrContentHandler}.
     * <p>
     * See Tika's docs for what the extracted document looks like.
     *
     * @see #CAPTURE_ELEMENTS
     */
    public static final String XPATH_EXPRESSION = "xpath";


    /**
     * Only extract and return the content, do not index it.
     */
    public static final String EXTRACT_ONLY = "extractOnly";

    /**
     * Content output format if extractOnly is true. Default is "xml", alternative is "text".
     */
    public static final String EXTRACT_FORMAT = "extractFormat";

    /**
     * Capture attributes separately according to the name of the element, instead of just adding them to the string
     * buffer.
     */
    public static final String CAPTURE_ATTRIBUTES = "captureAttr";

    /**
     * Literal field values will by default override other values such as metadata and content. Set this to false to
     * revert to pre-4.0 behaviour.
     */
    public static final String LITERALS_OVERRIDE = "literalsOverride";

    /**
     * Capture the specified fields (and everything included below it that isn't captured by some other capture field)
     * separately from the default. This is different
     * from the case of passing in an XPath expression.
     * <p>
     * The capture field is based on the localName returned to the
     * {@link org.apache.solr.handler.extraction.SolrContentHandler}
     * by Tika, not to be confused with the mapped field. The field name can then
     * be mapped into the index schema.
     * <p>
     * For instance, a Tika document may look like:
     * <pre>
     * <html>
     *   ...
     *   <body>
     *     <p>some text here. <div>more text</div></p>
     *     Some more text
     *   </body>
     * </pre>
     * By passing in the p tag, you could capture all P tags separately from the rest of the text.
     * Thus, in the example, the capture of the P tag would be: "some text here. more text"
     */
    public static final String CAPTURE_ELEMENTS = "capture";

    /**
     * The type of the stream. If not specified, Tika will use mime type detection.
     */
    public static final String STREAM_TYPE = "stream.type";


    /**
     * Optional. The file name. If specified, Tika can take this into account while
     * guessing the MIME type.
     */
    public static final String RESOURCE_NAME = "resource.name";

    /**
     * Optional. The password for this resource. Will be used instead of the rule-based password lookup mechanisms.
     */
    public static final String RESOURCE_PASSWORD = "resource.password";

    /**
     * Optional. If specified, the prefix will be prepended to all Metadata, such that it would be possible
     * to set up a dynamic field to automatically capture it.
     */
    public static final String UNKNOWN_FIELD_PREFIX = "uprefix";

    /**
     * Optional. If specified and the name of a potential field cannot be determined, the default Field specified
     * will be used instead.
     */
    public static final String DEFAULT_FIELD = "defaultField";

    /**
     * Optional. If specified, loads the file as a source for password lookups for Tika-encrypted documents.
     * <p>
     * File format is Java properties format with one key=value per line.
     * The key is evaluated as a regex against the file name, and the value is the password.
     * The rules are evaluated top-bottom, i.e. the first match will be used.
     * If you want a fallback password to be always used, supply a .*=<defaultmypassword> at the end.
     */
    public static final String PASSWORD_MAP_FILE = "passwordsFile";
}
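As a rough illustration of how these parameter names are used in practice, a client can set them when posting a file to Solr's extracting request handler. A minimal SolrJ sketch, not part of this changeset; the core URL, handler path, file, and field names here are assumptions for illustration:

import java.io.File;

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;

public class ExtractExample {
    public static void main(String[] args) throws Exception {
        SolrClient solr = new HttpSolrClient.Builder("http://localhost:8983/solr/search").build();
        // Post a PDF to the extracting handler ("/update/extract" by convention)
        ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update/extract");
        req.addFile(new File("example.pdf"), "application/pdf");
        // literal.id=doc1 : attach a literal field value to the indexed document
        req.setParam(ExtractingParams.LITERALS_PREFIX + "id", "doc1");
        // fmap.content=text : map Tika's "content" output to the "text" field
        req.setParam(ExtractingParams.MAP_PREFIX + "content", "text");
        // lowernames=true : normalize generated field names
        req.setParam(ExtractingParams.LOWERNAMES, "true");
        req.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
        solr.request(req);
        solr.close();
    }
}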
@@ -1,30 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.access.status;

import java.sql.SQLException;
import java.util.Date;

import org.dspace.content.Item;
import org.dspace.core.Context;

/**
 * Plugin interface for the access status calculation.
 */
public interface AccessStatusHelper {
    /**
     * Calculate the access status for the item.
     *
     * @param context   the DSpace context
     * @param item      the item
     * @param threshold the embargo threshold date
     * @return an access status value
     * @throws SQLException An exception that provides information on a database access error or other errors.
     */
    public String getAccessStatusFromItem(Context context, Item item, Date threshold)
        throws SQLException;
}
@@ -1,66 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.access.status;

import java.sql.SQLException;
import java.util.Date;

import org.dspace.access.status.service.AccessStatusService;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.core.service.PluginService;
import org.dspace.services.ConfigurationService;
import org.joda.time.LocalDate;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Implementation for the access status calculation service.
 */
public class AccessStatusServiceImpl implements AccessStatusService {
    // Plugin implementation, set from the DSpace configuration by init().
    protected AccessStatusHelper helper = null;

    protected Date forever_date = null;

    @Autowired(required = true)
    protected ConfigurationService configurationService;

    @Autowired(required = true)
    protected PluginService pluginService;

    /**
     * Initialize the bean (after dependency injection has already taken place).
     * Ensures the configurationService is injected, so that we can get the plugin
     * and the forever embargo date threshold from the configuration.
     * Called by "init-method" in Spring configuration.
     *
     * @throws Exception on generic exception
     */
    public void init() throws Exception {
        if (helper == null) {
            helper = (AccessStatusHelper) pluginService.getSinglePlugin(AccessStatusHelper.class);
            if (helper == null) {
                throw new IllegalStateException("The AccessStatusHelper plugin was not defined in "
                    + "DSpace configuration.");
            }

            // Defines the embargo forever date threshold for the access status.
            // Look at EmbargoService.FOREVER for some improvements?
            int year = configurationService.getIntProperty("access.status.embargo.forever.year");
            int month = configurationService.getIntProperty("access.status.embargo.forever.month");
            int day = configurationService.getIntProperty("access.status.embargo.forever.day");

            forever_date = new LocalDate(year, month, day).toDate();
        }
    }

    @Override
    public String getAccessStatus(Context context, Item item) throws SQLException {
        return helper.getAccessStatusFromItem(context, item, forever_date);
    }
}
@@ -1,159 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.access.status;

import java.sql.SQLException;
import java.util.Date;
import java.util.List;
import java.util.Objects;

import org.apache.commons.lang3.StringUtils;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.Group;

/**
 * Default plugin implementation of the access status helper.
 * The getAccessStatusFromItem method provides a simple logic to
 * calculate the access status of an item based on the policies of
 * the primary or the first bitstream in the original bundle.
 * Users can override this method for enhanced functionality.
 */
public class DefaultAccessStatusHelper implements AccessStatusHelper {
    public static final String EMBARGO = "embargo";
    public static final String METADATA_ONLY = "metadata.only";
    public static final String OPEN_ACCESS = "open.access";
    public static final String RESTRICTED = "restricted";
    public static final String UNKNOWN = "unknown";

    protected ItemService itemService =
        ContentServiceFactory.getInstance().getItemService();
    protected ResourcePolicyService resourcePolicyService =
        AuthorizeServiceFactory.getInstance().getResourcePolicyService();
    protected AuthorizeService authorizeService =
        AuthorizeServiceFactory.getInstance().getAuthorizeService();

    public DefaultAccessStatusHelper() {
        super();
    }

    /**
     * Look at the item's policies to determine an access status value.
     * It also considers a date threshold for embargoes and restrictions.
     *
     * If the item is null, simply returns the "unknown" value.
     *
     * @param context   the DSpace context
     * @param item      the item to embargo
     * @param threshold the embargo threshold date
     * @return an access status value
     */
    @Override
    public String getAccessStatusFromItem(Context context, Item item, Date threshold)
        throws SQLException {
        if (item == null) {
            return UNKNOWN;
        }
        // Consider only the original bundles.
        List<Bundle> bundles = item.getBundles(Constants.DEFAULT_BUNDLE_NAME);
        // Check for primary bitstreams first.
        Bitstream bitstream = bundles.stream()
            .map(bundle -> bundle.getPrimaryBitstream())
            .filter(Objects::nonNull)
            .findFirst()
            .orElse(null);
        if (bitstream == null) {
            // If there is no primary bitstream,
            // take the first bitstream in the bundles.
            bitstream = bundles.stream()
                .map(bundle -> bundle.getBitstreams())
                .flatMap(List::stream)
                .findFirst()
                .orElse(null);
        }
        return caculateAccessStatusForDso(context, bitstream, threshold);
    }

    /**
     * Look at the DSpace object's policies to determine an access status value.
     *
     * If the object is null, returns the "metadata.only" value.
     * If any policy attached to the object is valid for the anonymous group,
     * returns the "open.access" value.
     * Otherwise, if the policy start date is before the embargo threshold date,
     * returns the "embargo" value.
     * Every other case returns the "restricted" value.
     *
     * @param context   the DSpace context
     * @param dso       the DSpace object
     * @param threshold the embargo threshold date
     * @return an access status value
     */
    private String caculateAccessStatusForDso(Context context, DSpaceObject dso, Date threshold)
        throws SQLException {
        if (dso == null) {
            return METADATA_ONLY;
        }
        // Only consider read policies.
        List<ResourcePolicy> policies = authorizeService
            .getPoliciesActionFilter(context, dso, Constants.READ);
        int openAccessCount = 0;
        int embargoCount = 0;
        int restrictedCount = 0;
        int unknownCount = 0;
        // Looks at all read policies.
        for (ResourcePolicy policy : policies) {
            boolean isValid = resourcePolicyService.isDateValid(policy);
            Group group = policy.getGroup();
            // The group must not be null here. However,
            // if it is, consider this as an unexpected case.
            if (group == null) {
                unknownCount++;
            } else if (StringUtils.equals(group.getName(), Group.ANONYMOUS)) {
                // Only calculate the status for the anonymous group.
                if (isValid) {
                    // If the policy is valid, the anonymous group has access
                    // to the bitstream.
                    openAccessCount++;
                } else {
                    Date startDate = policy.getStartDate();
                    if (startDate != null && !startDate.before(threshold)) {
                        // If the policy start date has a value and this value
                        // is equal to or later than the configured forever date, the
                        // access status is also restricted.
                        restrictedCount++;
                    } else {
                        // If the current date is not between the policy start date
                        // and end date, the access status is embargo.
                        embargoCount++;
                    }
                }
            }
        }
        if (openAccessCount > 0) {
            return OPEN_ACCESS;
        }
        if (embargoCount > 0 && restrictedCount == 0) {
            return EMBARGO;
        }
        if (unknownCount > 0) {
            return UNKNOWN;
        }
        return RESTRICTED;
    }
}
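To make the decision rules above concrete, a minimal worked example; the dates are hypothetical, and the threshold is assumed to be the configured forever date of 10000-01-01:

// Assuming threshold = 10000-01-01 (the configured "forever" embargo date):
// - Anonymous READ policy with valid (or no) dates            -> "open.access"
// - Anonymous READ policy starting 2030-01-01 (not yet valid) -> "embargo"    (start date before threshold)
// - Anonymous READ policy starting 10000-01-01                -> "restricted" (start date at/after threshold)
// - No bitstream found in the ORIGINAL bundle                 -> "metadata.only"
// - Only a READ policy with a null group                      -> "unknown"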
@@ -1,25 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.access.status.factory;

import org.dspace.access.status.service.AccessStatusService;
import org.dspace.services.factory.DSpaceServicesFactory;

/**
 * Abstract factory to get services for the access status package,
 * use AccessStatusServiceFactory.getInstance() to retrieve an implementation.
 */
public abstract class AccessStatusServiceFactory {

    public abstract AccessStatusService getAccessStatusService();

    public static AccessStatusServiceFactory getInstance() {
        return DSpaceServicesFactory.getInstance().getServiceManager()
            .getServiceByName("accessStatusServiceFactory", AccessStatusServiceFactory.class);
    }
}
@@ -1,26 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.access.status.factory;

import org.dspace.access.status.service.AccessStatusService;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Factory implementation to get services for the access status package,
 * use AccessStatusServiceFactory.getInstance() to retrieve an implementation.
 */
public class AccessStatusServiceFactoryImpl extends AccessStatusServiceFactory {

    @Autowired(required = true)
    private AccessStatusService accessStatusService;

    @Override
    public AccessStatusService getAccessStatusService() {
        return accessStatusService;
    }
}
@@ -1,30 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
/**
 * <p>
 * Access status allows users to view bitstream availability before
 * browsing into the item itself.
 * </p>
 * <p>
 * The access status is calculated through a pluggable class:
 * {@link org.dspace.access.status.AccessStatusHelper}.
 * The {@link org.dspace.access.status.AccessStatusServiceImpl}
 * must be configured to specify this class, as well as a forever embargo date
 * threshold year, month and day.
 * </p>
 * <p>
 * See {@link org.dspace.access.status.DefaultAccessStatusHelper} for a simple calculation
 * based on the primary or the first bitstream of the original bundle. You can
 * supply your own class to implement more complex access statuses.
 * </p>
 * <p>
 * For now, the access status is calculated when the item is shown in a list.
 * </p>
 */

package org.dspace.access.status;
@@ -1,46 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.access.status.service;

import java.sql.SQLException;

import org.dspace.content.Item;
import org.dspace.core.Context;

/**
 * Public interface to the access status subsystem.
 * <p>
 * Configuration properties: (with examples)
 * {@code
 * # values for the forever embargo date threshold
 * # This threshold date is used in the default access status helper to determine if an item is
 * # restricted or embargoed based on the start date of the primary (or first) file policies.
 * # In this case, if the policy start date is before the threshold date, the status will
 * # be embargo, else it will be restricted.
 * # You might want to change this threshold based on your needs. For example: some databases
 * # don't accept a date later than 31 December 9999.
 * access.status.embargo.forever.year = 10000
 * access.status.embargo.forever.month = 1
 * access.status.embargo.forever.day = 1
 * # implementation of access status helper plugin - replace with local implementation if applicable
 * # This default access status helper provides an item status based on the policies of the primary
 * # bitstream (or first bitstream in the original bundles if no primary file is specified).
 * plugin.single.org.dspace.access.status.AccessStatusHelper = org.dspace.access.status.DefaultAccessStatusHelper
 * }
 */
public interface AccessStatusService {

    /**
     * Calculate the access status for an Item while considering the forever embargo date threshold.
     *
     * @param context the DSpace context
     * @param item    the item
     * @return an access status value
     * @throws SQLException An exception that provides information on a database access error or other errors.
     */
    public String getAccessStatus(Context context, Item item) throws SQLException;
}
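For orientation, a caller obtains this service through the AccessStatusServiceFactory shown earlier in this changeset. A minimal usage sketch; the context and item variables are assumed to come from the caller:

// Resolve the service via the abstract factory, then calculate an item's status.
AccessStatusService accessStatusService = AccessStatusServiceFactory.getInstance()
    .getAccessStatusService();
String status = accessStatusService.getAccessStatus(context, item);
// status is one of: "open.access", "embargo", "restricted", "metadata.only", "unknown"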
@@ -14,10 +14,10 @@ import java.util.UUID;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.collections.CollectionUtils;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Community;
import org.dspace.content.factory.ContentServiceFactory;
@@ -51,7 +51,7 @@ public class CommunityFiliator {
     */
    public static void main(String[] argv) throws Exception {
        // create an options object and populate it
        CommandLineParser parser = new DefaultParser();
        CommandLineParser parser = new PosixParser();

        Options options = new Options();

@@ -180,9 +180,13 @@ public class CommunityFiliator {
        // second test - circularity: parent's parents can't include proposed
        // child
        List<Community> parentDads = parent.getParentCommunities();
        if (parentDads.contains(child)) {
            System.out.println("Error, circular parentage - child is parent of parent");
            System.exit(1);

        for (int i = 0; i < parentDads.size(); i++) {
            if (parentDads.get(i).getID().equals(child.getID())) {
                System.out
                    .println("Error, circular parentage - child is parent of parent");
                System.exit(1);
            }
        }

        // everthing's OK
@@ -206,15 +210,26 @@
        throws SQLException, AuthorizeException, IOException {
        // verify that child is indeed a child of parent
        List<Community> parentKids = parent.getSubcommunities();
        if (!parentKids.contains(child)) {
            System.out.println("Error, child community not a child of parent community");
        boolean isChild = false;

        for (int i = 0; i < parentKids.size(); i++) {
            if (parentKids.get(i).getID().equals(child.getID())) {
                isChild = true;

                break;
            }
        }

        if (!isChild) {
            System.out
                .println("Error, child community not a child of parent community");
            System.exit(1);
        }

        // OK remove the mappings - but leave the community, which will become
        // top-level
        child.removeParentCommunity(parent);
        parent.removeSubCommunity(child);
        child.getParentCommunities().remove(parent);
        parent.getSubcommunities().remove(child);
        communityService.update(c, child);
        communityService.update(c, parent);

@@ -13,10 +13,10 @@ import java.util.Locale;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang.StringUtils;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.eperson.EPerson;
@@ -24,8 +24,6 @@ import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.eperson.service.GroupService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;

/**
 * A command-line tool for creating an initial administrator for setting up a
@@ -55,56 +53,34 @@ public final class CreateAdministrator {
    protected GroupService groupService;

    /**
     * For invoking via the command line. If called with no command line arguments,
     * For invoking via the command line.  If called with no command line arguments,
     * it will negotiate with the user for the administrator details
     *
     * @param argv the command line arguments given
     * @throws Exception if error
     */
    public static void main(String[] argv)
        throws Exception {
        CommandLineParser parser = new DefaultParser();
            throws Exception {
        CommandLineParser parser = new PosixParser();
        Options options = new Options();

        CreateAdministrator ca = new CreateAdministrator();

        options.addOption("e", "email", true, "administrator email address");
        options.addOption("f", "first", true, "administrator first name");
        options.addOption("h", "help", false, "explain create-administrator options");
        options.addOption("l", "last", true, "administrator last name");
        options.addOption("c", "language", true, "administrator language");
        options.addOption("p", "password", true, "administrator password");

        CommandLine line = null;

        try {

            line = parser.parse(options, argv);

        } catch (Exception e) {

            System.out.println(e.getMessage() + "\nTry \"dspace create-administrator -h\" to print help information.");
            System.exit(1);

        }
        CommandLine line = parser.parse(options, argv);

        if (line.hasOption("e") && line.hasOption("f") && line.hasOption("l") &&
            line.hasOption("c") && line.hasOption("p")) {
                line.hasOption("c") && line.hasOption("p")) {
            ca.createAdministrator(line.getOptionValue("e"),
                line.getOptionValue("f"), line.getOptionValue("l"),
                line.getOptionValue("c"), line.getOptionValue("p"));
        } else if (line.hasOption("h")) {
            String header = "\nA command-line tool for creating an initial administrator for setting up a" +
                " DSpace site. Unless all the required parameters are passed it will" +
                " prompt for an e-mail address, last name, first name and password from" +
                " standard input. An administrator group is then created and the data passed" +
                " in used to create an e-person in that group.\n\n";
            String footer = "\n";
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("dspace create-administrator", header, options, footer, true);
            return;
                    line.getOptionValue("f"), line.getOptionValue("l"),
                    line.getOptionValue("c"), line.getOptionValue("p"));
        } else {
            ca.negotiateAdministratorDetails(line);
            ca.negotiateAdministratorDetails();
        }
    }

@@ -114,7 +90,7 @@ public final class CreateAdministrator {
     * @throws Exception if error
     */
    protected CreateAdministrator()
        throws Exception {
            throws Exception {
        context = new Context();
        groupService = EPersonServiceFactory.getInstance().getGroupService();
        ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
@@ -126,20 +102,20 @@ public final class CreateAdministrator {
     *
     * @throws Exception if error
     */
    protected void negotiateAdministratorDetails(CommandLine line)
        throws Exception {
    protected void negotiateAdministratorDetails()
            throws Exception {
        Console console = System.console();

        System.out.println("Creating an initial administrator account");

        String email = line.getOptionValue('e');
        String firstName = line.getOptionValue('f');
        String lastName = line.getOptionValue('l');
        String language = I18nUtil.getDefaultLocale().getLanguage();
        ConfigurationService cfg = DSpaceServicesFactory.getInstance().getConfigurationService();
        boolean flag = line.hasOption('p');
        char[] password = null;
        boolean dataOK = line.hasOption('f') && line.hasOption('e') && line.hasOption('l');
        boolean dataOK = false;

        String email = null;
        String firstName = null;
        String lastName = null;
        char[] password1 = null;
        char[] password2 = null;
        String language = I18nUtil.DEFAULTLOCALE.getLanguage();

        while (!dataOK) {
            System.out.print("E-mail address: ");
@@ -170,9 +146,10 @@ public final class CreateAdministrator {
            if (lastName != null) {
                lastName = lastName.trim();
            }
            if (cfg.hasProperty("webui.supported.locales")) {
                System.out.println("Select one of the following languages: "
                    + cfg.getProperty("webui.supported.locales"));

            if (ConfigurationManager.getProperty("webui.supported.locales") != null) {
                System.out.println("Select one of the following languages: " + ConfigurationManager
                    .getProperty("webui.supported.locales"));
                System.out.print("Language: ");
                System.out.flush();

@@ -184,59 +161,46 @@ public final class CreateAdministrator {
            }
        }

        System.out.print("Is the above data correct? (y or n): ");
        System.out.println("Password will not display on screen.");
        System.out.print("Password: ");
        System.out.flush();

        String s = console.readLine();
        password1 = console.readPassword();

        if (s != null) {
            s = s.trim();
            if (s.toLowerCase().startsWith("y")) {
                dataOK = true;
        System.out.print("Again to confirm: ");
        System.out.flush();

        password2 = console.readPassword();

        //TODO real password validation
        if (password1.length > 1 && Arrays.equals(password1, password2)) {
            // password OK
            System.out.print("Is the above data correct? (y or n): ");
            System.out.flush();

            String s = console.readLine();

            if (s != null) {
                s = s.trim();
                if (s.toLowerCase().startsWith("y")) {
                    dataOK = true;
                }
            }
        } else {
            System.out.println("Passwords don't match");
        }
    }

        }
        if (!flag) {
            password = getPassword(console);
            if (password == null) {
                return;
            }
        } else {
            password = line.getOptionValue("p").toCharArray();
        }
        // if we make it to here, we are ready to create an administrator
        createAdministrator(email, firstName, lastName, language, String.valueOf(password));
        createAdministrator(email, firstName, lastName, language, String.valueOf(password1));

    }

    private char[] getPassword(Console console) {
        char[] password1 = null;
        char[] password2 = null;
        System.out.println("Password will not display on screen.");
        System.out.print("Password: ");
        System.out.flush();

        password1 = console.readPassword();

        System.out.print("Again to confirm: ");
        System.out.flush();

        password2 = console.readPassword();

        // TODO real password validation
        if (password1.length > 1 && Arrays.equals(password1, password2)) {
            // password OK
            Arrays.fill(password2, ' ');
            return password1;
        } else {
            System.out.println("Passwords don't match");
            return null;
        }
        //Cleaning arrays that held password
        Arrays.fill(password1, ' ');
        Arrays.fill(password2, ' ');
    }

    /**
     * Create the administrator with the given details. If the user
     * Create the administrator with the given details.  If the user
     * already exists then they are simply upped to administrator status
     *
     * @param email the email for the user
@@ -247,8 +211,8 @@ public final class CreateAdministrator {
     * @throws Exception if error
     */
    protected void createAdministrator(String email, String first, String last,
        String language, String pw)
        throws Exception {
            String language, String pw)
            throws Exception {
        // Of course we aren't an administrator yet so we need to
        // circumvent authorisation
        context.turnOffAuthorisationSystem();
@@ -10,7 +10,6 @@ package org.dspace.administer;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
@@ -18,28 +17,25 @@ import java.util.Map;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.xml.serialize.Method;
import org.apache.xml.serialize.OutputFormat;
import org.apache.xml.serialize.XMLSerializer;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context;
import org.w3c.dom.DOMConfiguration;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.bootstrap.DOMImplementationRegistry;
import org.w3c.dom.ls.DOMImplementationLS;
import org.w3c.dom.ls.LSOutput;
import org.w3c.dom.ls.LSSerializer;
import org.xml.sax.SAXException;


/**
 * @author Graham Triggs
 *
 * This class creates an XML document as passed in the arguments and
 * This class creates an xml document as passed in the arguments and
 * from the metadata schemas for the repository.
 *
 * The form of the XML is as follows
@@ -65,20 +61,17 @@ public class MetadataExporter {
    private MetadataExporter() { }

    /**
     * @param args command line arguments
     * @param args commandline arguments
     * @throws ParseException if parser error
     * @throws SAXException if XML parse error
     * @throws IOException if IO error
     * @throws SQLException if database error
     * @throws RegistryExportException if export error
     * @throws ClassNotFoundException if no suitable DOM implementation
     * @throws InstantiationException if no suitable DOM implementation
     * @throws IllegalAccessException if no suitable DOM implementation
     */
    public static void main(String[] args)
        throws ParseException, SQLException, IOException, RegistryExportException,
        ClassNotFoundException, InstantiationException, IllegalAccessException {
        throws ParseException, SQLException, IOException, SAXException, RegistryExportException {
        // create an options object and populate it
        CommandLineParser parser = new DefaultParser();
        CommandLineParser parser = new PosixParser();
        Options options = new Options();
        options.addOption("f", "file", true, "output xml file for registry");
        options.addOption("s", "schema", true, "the name of the schema to export");
@@ -102,31 +95,32 @@ public class MetadataExporter {
    }

    /**
     * Save a registry to a file path
     * Save a registry to a filepath
     *
     * @param file   file path
     * @param file   filepath
     * @param schema schema definition to save
     * @throws SQLException if database error
     * @throws IOException if IO error
     * @throws SAXException if XML error
     * @throws RegistryExportException if export error
     * @throws ClassNotFoundException if no suitable DOM implementation
     * @throws InstantiationException if no suitable DOM implementation
     * @throws IllegalAccessException if no suitable DOM implementation
     */
    public static void saveRegistry(String file, String schema)
        throws SQLException, IOException, RegistryExportException,
        ClassNotFoundException, InstantiationException, IllegalAccessException {
        throws SQLException, IOException, SAXException, RegistryExportException {
        // create a context
        Context context = new Context();
        context.turnOffAuthorisationSystem();

        // Initialize an XML document.
        Document document = DOMImplementationRegistry.newInstance()
            .getDOMImplementation("XML 3.0")
            .createDocument(null, "dspace-dc-types", null);
        OutputFormat xmlFormat = new OutputFormat(Method.XML, "UTF-8", true);
        xmlFormat.setLineWidth(120);
        xmlFormat.setIndent(4);

        XMLSerializer xmlSerializer = new XMLSerializer(new BufferedWriter(new FileWriter(file)), xmlFormat);
        // XMLSerializer xmlSerializer = new XMLSerializer(System.out, xmlFormat);
        xmlSerializer.startDocument();
        xmlSerializer.startElement("dspace-dc-types", null);

        // Save the schema definition(s)
        saveSchema(context, document, schema);
        saveSchema(context, xmlSerializer, schema);

        List<MetadataField> mdFields = null;

@@ -145,64 +139,55 @@ public class MetadataExporter {
            mdFields = metadataFieldService.findAll(context);
        }

        // Compose the metadata fields
        // Output the metadata fields
        for (MetadataField mdField : mdFields) {
            saveType(context, document, mdField);
            saveType(context, xmlSerializer, mdField);
        }

        // Serialize the completed document to the output file.
        try (Writer writer = new BufferedWriter(new FileWriter(file))) {
            DOMImplementationLS lsImplementation
                = (DOMImplementationLS) DOMImplementationRegistry.newInstance()
                .getDOMImplementation("LS");
            LSSerializer serializer = lsImplementation.createLSSerializer();
            DOMConfiguration configuration = serializer.getDomConfig();
            configuration.setParameter("format-pretty-print", true);
            LSOutput lsOutput = lsImplementation.createLSOutput();
            lsOutput.setEncoding("UTF-8");
            lsOutput.setCharacterStream(writer);
            serializer.write(document, lsOutput);
        }
        xmlSerializer.endElement("dspace-dc-types");
        xmlSerializer.endDocument();

        // abort the context, as we shouldn't have changed it!!
        context.abort();
    }

    /**
     * Compose the schema registry. If the parameter 'schema' is null or empty, save all schemas.
     * Serialize the schema registry. If the parameter 'schema' is null or empty, save all schemas
     *
     * @param context       DSpace Context
     * @param document      the document being built
     * @param xmlSerializer XML serializer
     * @param schema        schema (may be null to save all)
     * @throws SQLException if database error
     * @throws SAXException if XML error
     * @throws RegistryExportException if export error
     */
    public static void saveSchema(Context context, Document document, String schema)
        throws SQLException, RegistryExportException {
    public static void saveSchema(Context context, XMLSerializer xmlSerializer, String schema)
        throws SQLException, SAXException, RegistryExportException {
        if (schema != null && !"".equals(schema)) {
            // Find a single named schema
            MetadataSchema mdSchema = metadataSchemaService.find(context, schema);

            saveSchema(document, mdSchema);
            saveSchema(xmlSerializer, mdSchema);
        } else {
            // Find all schemas
            List<MetadataSchema> mdSchemas = metadataSchemaService.findAll(context);

            for (MetadataSchema mdSchema : mdSchemas) {
                saveSchema(document, mdSchema);
                saveSchema(xmlSerializer, mdSchema);
            }
        }
    }

    /**
     * Compose a single schema (namespace) registry entry
     * Serialize a single schema (namespace) registry entry
     *
     * @param document the output document being built.
     * @param mdSchema DSpace metadata schema
     * @param xmlSerializer XML serializer
     * @param mdSchema      DSpace metadata schema
     * @throws SAXException if XML error
     * @throws RegistryExportException if export error
     */
    private static void saveSchema(Document document, MetadataSchema mdSchema)
        throws RegistryExportException {
    private static void saveSchema(XMLSerializer xmlSerializer, MetadataSchema mdSchema)
        throws SAXException, RegistryExportException {
        // If we haven't got a schema, it's an error
        if (mdSchema == null) {
            throw new RegistryExportException("no schema to export");
@@ -221,34 +206,35 @@ public class MetadataExporter {
            return;
        }

        Element document_element = document.getDocumentElement();
        // Output the parent tag
        xmlSerializer.startElement("dc-schema", null);

        // Compose the parent tag
        Element schema_element = document.createElement("dc-schema");
        document_element.appendChild(schema_element);
        // Output the schema name
        xmlSerializer.startElement("name", null);
        xmlSerializer.characters(name.toCharArray(), 0, name.length());
        xmlSerializer.endElement("name");

        // Compose the schema name
        Element name_element = document.createElement("name");
        schema_element.appendChild(name_element);
        name_element.setTextContent(name);
        // Output the schema namespace
        xmlSerializer.startElement("namespace", null);
        xmlSerializer.characters(namespace.toCharArray(), 0, namespace.length());
        xmlSerializer.endElement("namespace");

        // Compose the schema namespace
        Element namespace_element = document.createElement("namespace");
        schema_element.appendChild(namespace_element);
        namespace_element.setTextContent(namespace);
        xmlSerializer.endElement("dc-schema");
    }

    /**
     * Compose a single metadata field registry entry to XML.
     * Serialize a single metadata field registry entry to xml
     *
     * @param context       DSpace context
     * @param document      the output document being built.
     * @param xmlSerializer xml serializer
     * @param mdField       DSpace metadata field
     * @throws SAXException if XML error
     * @throws RegistryExportException if export error
     * @throws SQLException if database error
     * @throws IOException if IO error
     */
    private static void saveType(Context context, Document document, MetadataField mdField)
        throws RegistryExportException, SQLException {
    private static void saveType(Context context, XMLSerializer xmlSerializer, MetadataField mdField)
        throws SAXException, RegistryExportException, SQLException, IOException {
        // If we haven't been given a field, it's an error
        if (mdField == null) {
            throw new RegistryExportException("no field to export");
@@ -265,39 +251,38 @@ public class MetadataExporter {
            throw new RegistryExportException("incomplete field information");
        }

        Element document_element = document.getDocumentElement();
        // Output the parent tag
        xmlSerializer.startElement("dc-type", null);

        // Compose the parent tag
        Element dc_type = document.createElement("dc-type");
        document_element.appendChild(dc_type);
        // Output the schema name
        xmlSerializer.startElement("schema", null);
        xmlSerializer.characters(schemaName.toCharArray(), 0, schemaName.length());
        xmlSerializer.endElement("schema");

        // Compose the schema name
        Element schema_element = document.createElement("schema");
        dc_type.appendChild(schema_element);
        schema_element.setTextContent(schemaName);
        // Output the element
        xmlSerializer.startElement("element", null);
        xmlSerializer.characters(element.toCharArray(), 0, element.length());
        xmlSerializer.endElement("element");

        // Compose the element
        Element element_element = document.createElement("element");
        dc_type.appendChild(element_element);
        element_element.setTextContent(element);

        // Compose the qualifier, if present
        // Output the qualifier, if present
        if (qualifier != null) {
            Element qualifier_element = document.createElement("qualifier");
            dc_type.appendChild(qualifier_element);
            qualifier_element.setTextContent(qualifier);
            xmlSerializer.startElement("qualifier", null);
            xmlSerializer.characters(qualifier.toCharArray(), 0, qualifier.length());
            xmlSerializer.endElement("qualifier");
        } else {
            dc_type.appendChild(document.createComment("unqualified"));
            xmlSerializer.comment("unqualified");
        }

        // Compose the scope note, if present
        // Output the scope note, if present
        if (scopeNote != null) {
            Element scope_element = document.createElement("scope_note");
            dc_type.appendChild(scope_element);
            scope_element.setTextContent(scopeNote);
            xmlSerializer.startElement("scope_note", null);
            xmlSerializer.characters(scopeNote.toCharArray(), 0, scopeNote.length());
            xmlSerializer.endElement("scope_note");
        } else {
            dc_type.appendChild(document.createComment("no scope note"));
            xmlSerializer.comment("no scope note");
        }

        xmlSerializer.endElement("dc-type");
    }

    static Map<Integer, String> schemaMap = new HashMap<Integer, String>();
@@ -332,7 +317,7 @@ public class MetadataExporter {
    }

    /**
     * Print the usage message to standard output
     * Print the usage message to stdout
     */
    public static void usage() {
        String usage = "Use this class with the following options:\n" +
@@ -11,20 +11,16 @@ import java.io.IOException;
import java.sql.SQLException;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataFieldService;
@@ -84,7 +80,7 @@ public class MetadataImporter {
     * @throws SQLException if database error
     * @throws IOException if IO error
     * @throws TransformerException if transformer error
     * @throws ParserConfigurationException if configuration error
     * @throws ParserConfigurationException if config error
     * @throws AuthorizeException if authorization error
     * @throws SAXException if parser error
     * @throws NonUniqueMetadataException if duplicate metadata
@@ -93,23 +89,26 @@ public class MetadataImporter {
    public static void main(String[] args)
        throws ParseException, SQLException, IOException, TransformerException,
        ParserConfigurationException, AuthorizeException, SAXException,
        NonUniqueMetadataException, RegistryImportException, XPathExpressionException {
        NonUniqueMetadataException, RegistryImportException {
        boolean forceUpdate = false;

        // create an options object and populate it
        CommandLineParser parser = new DefaultParser();
        CommandLineParser parser = new PosixParser();
        Options options = new Options();
        options.addOption("f", "file", true, "source xml file for DC fields");
        options.addOption("u", "update", false, "update an existing schema");
        CommandLine line = parser.parse(options, args);

        String file = null;
        if (line.hasOption('f')) {
            String file = line.getOptionValue('f');
            boolean forceUpdate = line.hasOption('u');
            loadRegistry(file, forceUpdate);
            file = line.getOptionValue('f');
        } else {
            usage();
            System.exit(1);
            System.exit(0);
        }

        forceUpdate = line.hasOption('u');
        loadRegistry(file, forceUpdate);
    }

    /**
@@ -120,15 +119,15 @@ public class MetadataImporter {
     * @throws SQLException if database error
     * @throws IOException if IO error
     * @throws TransformerException if transformer error
     * @throws ParserConfigurationException if configuration error
     * @throws ParserConfigurationException if config error
     * @throws AuthorizeException if authorization error
     * @throws SAXException if parser error
     * @throws NonUniqueMetadataException if duplicate metadata
     * @throws RegistryImportException if import fails
     */
    public static void loadRegistry(String file, boolean forceUpdate)
        throws SQLException, IOException, TransformerException, ParserConfigurationException, AuthorizeException,
        SAXException, NonUniqueMetadataException, RegistryImportException, XPathExpressionException {
        throws SQLException, IOException, TransformerException, ParserConfigurationException,
        AuthorizeException, SAXException, NonUniqueMetadataException, RegistryImportException {
        Context context = null;

        try {
@@ -140,9 +139,7 @@ public class MetadataImporter {
            Document document = RegistryImporter.loadXML(file);

            // Get the nodes corresponding to types
            XPath xPath = XPathFactory.newInstance().newXPath();
            NodeList schemaNodes = (NodeList) xPath.compile("/dspace-dc-types/dc-schema")
                .evaluate(document, XPathConstants.NODESET);
            NodeList schemaNodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-schema");

            // Add each one as a new format to the registry
            for (int i = 0; i < schemaNodes.getLength(); i++) {
@@ -151,8 +148,7 @@ public class MetadataImporter {
            }

            // Get the nodes corresponding to types
            NodeList typeNodes = (NodeList) xPath.compile("/dspace-dc-types/dc-type")
                .evaluate(document, XPathConstants.NODESET);
            NodeList typeNodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-type");

            // Add each one as a new format to the registry
            for (int i = 0; i < typeNodes.getLength(); i++) {
@@ -184,8 +180,8 @@ public class MetadataImporter {
     * @throws RegistryImportException if import fails
     */
    private static void loadSchema(Context context, Node node, boolean updateExisting)
        throws SQLException, AuthorizeException, NonUniqueMetadataException, RegistryImportException,
        XPathExpressionException {
        throws SQLException, IOException, TransformerException,
        AuthorizeException, NonUniqueMetadataException, RegistryImportException {
        // Get the values
        String name = RegistryImporter.getElementData(node, "name");
        String namespace = RegistryImporter.getElementData(node, "namespace");
@@ -230,7 +226,7 @@ public class MetadataImporter {
    /**
     * Process a node in the metadata registry XML file. The node must
     * be a "dc-type" node. If the type already exists, then it
     * will not be re-imported.
     * will not be reimported
     *
     * @param context DSpace context object
     * @param node    the node in the DOM tree
@@ -242,8 +238,8 @@ public class MetadataImporter {
     * @throws RegistryImportException if import fails
     */
    private static void loadType(Context context, Node node)
        throws SQLException, IOException, AuthorizeException, NonUniqueMetadataException, RegistryImportException,
        XPathExpressionException {
        throws SQLException, IOException, TransformerException,
        AuthorizeException, NonUniqueMetadataException, RegistryImportException {
        // Get the values
        String schema = RegistryImporter.getElementData(node, "schema");
        String element = RegistryImporter.getElementData(node, "element");
@@ -252,7 +248,7 @@ public class MetadataImporter {

        // If the schema is not provided default to DC
        if (schema == null) {
            schema = MetadataSchemaEnum.DC.getName();
            schema = MetadataSchema.DC_SCHEMA;
        }
@@ -1,140 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.administer;

import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.apache.commons.cli.ParseException;
import org.apache.commons.lang.time.DateUtils;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.ProcessStatus;
import org.dspace.core.Context;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.scripts.Process;
import org.dspace.scripts.factory.ScriptServiceFactory;
import org.dspace.scripts.service.ProcessService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.utils.DSpace;

/**
 * Script to cleanup the old processes in the specified state.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class ProcessCleaner extends DSpaceRunnable<ProcessCleanerConfiguration<ProcessCleaner>> {

    private ConfigurationService configurationService;

    private ProcessService processService;


    private boolean cleanCompleted = false;

    private boolean cleanFailed = false;

    private boolean cleanRunning = false;

    private boolean help = false;

    private Integer days;


    @Override
    public void setup() throws ParseException {

        this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
        this.processService = ScriptServiceFactory.getInstance().getProcessService();

        this.help = commandLine.hasOption('h');
        this.cleanFailed = commandLine.hasOption('f');
        this.cleanRunning = commandLine.hasOption('r');
        this.cleanCompleted = commandLine.hasOption('c') || (!cleanFailed && !cleanRunning);

        this.days = configurationService.getIntProperty("process-cleaner.days", 14);

        if (this.days <= 0) {
            throw new IllegalStateException("The number of days must be a positive integer.");
        }

    }

    @Override
    public void internalRun() throws Exception {

        if (help) {
            printHelp();
            return;
        }

        Context context = new Context();

        try {
            context.turnOffAuthorisationSystem();
            performDeletion(context);
        } finally {
            context.restoreAuthSystemState();
            context.complete();
        }

    }

    /**
     * Delete the processes based on the specified statuses and the configured days
     * from their creation.
     */
    private void performDeletion(Context context) throws SQLException, IOException, AuthorizeException {

        List<ProcessStatus> statuses = getProcessToDeleteStatuses();
        Date creationDate = calculateCreationDate();

        handler.logInfo("Searching for processes with status: " + statuses);
        List<Process> processes = processService.findByStatusAndCreationTimeOlderThan(context, statuses, creationDate);
        handler.logInfo("Found " + processes.size() + " processes to be deleted");
        for (Process process : processes) {
            processService.delete(context, process);
        }

        handler.logInfo("Process cleanup completed");

    }

    /**
     * Returns the list of Process statuses do be deleted.
     */
    private List<ProcessStatus> getProcessToDeleteStatuses() {
        List<ProcessStatus> statuses = new ArrayList<ProcessStatus>();
        if (cleanCompleted) {
            statuses.add(ProcessStatus.COMPLETED);
        }
        if (cleanFailed) {
            statuses.add(ProcessStatus.FAILED);
        }
        if (cleanRunning) {
            statuses.add(ProcessStatus.RUNNING);
        }
        return statuses;
    }

    private Date calculateCreationDate() {
        return DateUtils.addDays(new Date(), -days);
    }

    @Override
    @SuppressWarnings("unchecked")
    public ProcessCleanerConfiguration<ProcessCleaner> getScriptConfiguration() {
        return new DSpace().getServiceManager()
            .getServiceByName("process-cleaner", ProcessCleanerConfiguration.class);
    }

}
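The deleted class above boils down to "collect the selected statuses, compute a cutoff date, delete everything older". A tiny standalone sketch of the same cutoff computation, using commons-lang3's DateUtils (the class itself imports the older commons-lang package; both offer the same addDays signature):

import java.util.Date;
import org.apache.commons.lang3.time.DateUtils;

public class CutoffSketch {
    public static void main(String[] args) {
        int days = 14; // mirrors the process-cleaner.days default above
        // A negative offset walks the clock backwards: the retention window boundary.
        Date cutoff = DateUtils.addDays(new Date(), -days);
        System.out.println("Processes created before " + cutoff + " would be deleted");
    }
}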
@@ -1,18 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.administer;

/**
 * The {@link ProcessCleaner} for CLI.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class ProcessCleanerCli extends ProcessCleaner {

}
@@ -1,18 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.administer;

/**
 * The {@link ProcessCleanerConfiguration} for CLI.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class ProcessCleanerCliConfiguration extends ProcessCleanerConfiguration<ProcessCleanerCli> {

}
@@ -1,70 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.administer;

import java.sql.SQLException;

import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * The {@link ScriptConfiguration} for the {@link ProcessCleaner} script.
 */
public class ProcessCleanerConfiguration<T extends ProcessCleaner> extends ScriptConfiguration<T> {

    @Autowired
    private AuthorizeService authorizeService;

    private Class<T> dspaceRunnableClass;

    @Override
    public boolean isAllowedToExecute(Context context) {
        try {
            return authorizeService.isAdmin(context);
        } catch (SQLException e) {
            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
        }
    }

    @Override
    public Options getOptions() {
        if (options == null) {

            Options options = new Options();

            options.addOption("h", "help", false, "help");

            options.addOption("r", "running", false, "delete the process with RUNNING status");
            options.getOption("r").setType(boolean.class);

            options.addOption("f", "failed", false, "delete the process with FAILED status");
            options.getOption("f").setType(boolean.class);

            options.addOption("c", "completed", false,
                "delete the process with COMPLETED status (default if no statuses are specified)");
            options.getOption("c").setType(boolean.class);

            super.options = options;
        }
        return options;
    }

    @Override
    public Class<T> getDspaceRunnableClass() {
        return dspaceRunnableClass;
    }

    @Override
    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
        this.dspaceRunnableClass = dspaceRunnableClass;
    }

}
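The configuration above only declares commons-cli options; parsing and the "completed by default" rule live in ProcessCleaner.setup(). A small sketch that exercises the same flags outside of DSpace, using plain commons-cli with the flag semantics copied from the classes above:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;

public class OptionsSketch {
    public static void main(String[] args) throws Exception {
        // Same flags the configuration above declares.
        Options options = new Options();
        options.addOption("h", "help", false, "help");
        options.addOption("r", "running", false, "delete the process with RUNNING status");
        options.addOption("f", "failed", false, "delete the process with FAILED status");
        options.addOption("c", "completed", false, "delete the process with COMPLETED status");

        CommandLine line = new DefaultParser().parse(options, new String[] {"-f", "-r"});

        // Mirrors ProcessCleaner.setup(): COMPLETED is the default when no status flag is given.
        boolean cleanFailed = line.hasOption('f');
        boolean cleanRunning = line.hasOption('r');
        boolean cleanCompleted = line.hasOption('c') || (!cleanFailed && !cleanRunning);
        System.out.printf("failed=%s running=%s completed=%s%n", cleanFailed, cleanRunning, cleanCompleted);
    }
}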
@@ -13,11 +13,8 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;

import org.apache.xpath.XPathAPI;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
@@ -75,10 +72,9 @@ public class RegistryImporter {
     * @throws TransformerException if error
     */
    public static String getElementData(Node parentElement, String childName)
        throws XPathExpressionException {
        throws TransformerException {
        // Grab the child node
        XPath xPath = XPathFactory.newInstance().newXPath();
        Node childNode = (Node) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODE);
        Node childNode = XPathAPI.selectSingleNode(parentElement, childName);

        if (childNode == null) {
            // No child node, so no values
@@ -119,10 +115,9 @@ public class RegistryImporter {
     * @throws TransformerException if error
     */
    public static String[] getRepeatedElementData(Node parentElement,
                                                  String childName) throws XPathExpressionException {
                                                  String childName) throws TransformerException {
        // Grab the child node
        XPath xPath = XPathFactory.newInstance().newXPath();
        NodeList childNodes = (NodeList) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODESET);
        NodeList childNodes = XPathAPI.selectNodeList(parentElement, childName);

        String[] data = new String[childNodes.getLength()];

@@ -16,18 +16,15 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;

import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamFormatService;
import org.dspace.core.Context;
import org.dspace.core.LogHelper;
import org.dspace.core.LogManager;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
@@ -50,7 +47,7 @@ public class RegistryLoader {
    /**
     * log4j category
     */
    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(RegistryLoader.class);
    private static Logger log = Logger.getLogger(RegistryLoader.class);

    protected static BitstreamFormatService bitstreamFormatService = ContentServiceFactory.getInstance()
                                                                                          .getBitstreamFormatService();
@@ -98,7 +95,7 @@ public class RegistryLoader {

            System.exit(1);
        } catch (Exception e) {
            log.fatal(LogHelper.getHeader(context, "error_loading_registries",
            log.fatal(LogManager.getHeader(context, "error_loading_registries",
                                          ""), e);

            System.err.println("Error: \n - " + e.getMessage());
@@ -125,13 +122,12 @@ public class RegistryLoader {
     */
    public static void loadBitstreamFormats(Context context, String filename)
        throws SQLException, IOException, ParserConfigurationException,
        SAXException, TransformerException, AuthorizeException, XPathExpressionException {
        SAXException, TransformerException, AuthorizeException {
        Document document = loadXML(filename);

        // Get the nodes corresponding to formats
        XPath xPath = XPathFactory.newInstance().newXPath();
        NodeList typeNodes = (NodeList) xPath.compile("dspace-bitstream-types/bitstream-type")
                                             .evaluate(document, XPathConstants.NODESET);
        NodeList typeNodes = XPathAPI.selectNodeList(document,
                                                     "dspace-bitstream-types/bitstream-type");

        // Add each one as a new format to the registry
        for (int i = 0; i < typeNodes.getLength(); i++) {
@@ -139,7 +135,7 @@ public class RegistryLoader {
            loadFormat(context, n);
        }

        log.info(LogHelper.getHeader(context, "load_bitstream_formats",
        log.info(LogManager.getHeader(context, "load_bitstream_formats",
                                     "number_loaded=" + typeNodes.getLength()));
    }

@@ -155,7 +151,8 @@ public class RegistryLoader {
     * @throws AuthorizeException if authorization error
     */
    private static void loadFormat(Context context, Node node)
        throws SQLException, AuthorizeException, XPathExpressionException {
        throws SQLException, IOException, TransformerException,
        AuthorizeException {
        // Get the values
        String mimeType = getElementData(node, "mimetype");
        String shortDesc = getElementData(node, "short_description");
@@ -234,10 +231,9 @@ public class RegistryLoader {
     * @throws TransformerException if transformer error
     */
    private static String getElementData(Node parentElement, String childName)
        throws XPathExpressionException {
        throws TransformerException {
        // Grab the child node
        XPath xPath = XPathFactory.newInstance().newXPath();
        Node childNode = (Node) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODE);
        Node childNode = XPathAPI.selectSingleNode(parentElement, childName);

        if (childNode == null) {
            // No child node, so no values
@@ -278,10 +274,9 @@ public class RegistryLoader {
     * @throws TransformerException if transformer error
     */
    private static String[] getRepeatedElementData(Node parentElement,
                                                   String childName) throws XPathExpressionException {
                                                   String childName) throws TransformerException {
        // Grab the child node
        XPath xPath = XPathFactory.newInstance().newXPath();
        NodeList childNodes = (NodeList) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODESET);
        NodeList childNodes = XPathAPI.selectNodeList(parentElement, childName);

        String[] data = new String[childNodes.getLength()];

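Besides the XPath swap, these RegistryLoader hunks migrate logging from Log4j 1.x (org.apache.log4j, with DSpace's LogManager helper) to the Log4j 2 API (org.apache.logging.log4j, with LogHelper). A minimal sketch of the newer logger acquisition; the class name is illustrative:

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class LoggingSketch {
    // Log4j 2 style, as in the hunk above. The Log4j 1.x equivalent was
    // org.apache.log4j.Logger.getLogger(LoggingSketch.class).
    private static final Logger log = LogManager.getLogger(LoggingSketch.class);

    public static void main(String[] args) {
        log.info("number_loaded={}", 42); // parameterized messages are a Log4j 2 API feature
    }
}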
@@ -7,60 +7,34 @@
 */
package org.dspace.administer;

import static org.dspace.content.service.DSpaceObjectService.MD_COPYRIGHT_TEXT;
import static org.dspace.content.service.DSpaceObjectService.MD_INTRODUCTORY_TEXT;
import static org.dspace.content.service.DSpaceObjectService.MD_LICENSE;
import static org.dspace.content.service.DSpaceObjectService.MD_NAME;
import static org.dspace.content.service.DSpaceObjectService.MD_PROVENANCE_DESCRIPTION;
import static org.dspace.content.service.DSpaceObjectService.MD_SHORT_DESCRIPTION;
import static org.dspace.content.service.DSpaceObjectService.MD_SIDEBAR_TEXT;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.cli.PosixParser;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.MetadataFieldName;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.core.Context;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.jdom2.Element;
import org.jdom2.output.Format;
import org.jdom2.output.XMLOutputter;
import org.jdom.Element;
import org.jdom.output.XMLOutputter;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
@@ -71,54 +45,44 @@ import org.xml.sax.SAXException;
 * an XML file.
 *
 * The XML file structure needs to be:
 * <pre>{@code
 * {@code
 * <import_structure>
 *     <community>
 *         <name>....</name>
 *         <community>...</community>
 *         <collection>
 *             <name>....</name>
 *         </collection>
 *     </community>
 *     <community>
 *         <name>....</name>
 *         <community>...</community>
 *         <collection>
 *             <name>....</name>
 *         </collection>
 *     </community>
 * </import_structure>
 * }</pre>
 *
 * <p>
 * It can be arbitrarily deep, and supports all the metadata elements
 * }
 * it can be arbitrarily deep, and supports all the metadata elements
 * that make up the community and collection metadata. See the system
 * documentation for more details.
 * documentation for more details
 *
 * @author Richard Jones
 */

public class StructBuilder {
    /** Name of the root element for the document to be imported. */
    static final String INPUT_ROOT = "import_structure";

    /*
     * Name of the root element for the document produced by importing.
     * Community and collection elements are annotated with their identifiers.
    /**
     * the output xml document which will contain updated information about the
     * imported structure
     */
    static final String RESULT_ROOT = "imported_structure";
    private static org.jdom.Document xmlOutput = new org.jdom.Document(new Element("imported_structure"));

    /**
     * A table to hold metadata for the collection being worked on.
     * a hashtable to hold metadata for the collection being worked on
     */
    private static final Map<String, MetadataFieldName> collectionMap = new HashMap<>();
    private static Map<String, String> collectionMap = new HashMap<String, String>();

    /**
     * A table to hold metadata for the community being worked on.
     * a hashtable to hold metadata for the community being worked on
     */
    private static final Map<String, MetadataFieldName> communityMap = new HashMap<>();
    private static Map<String, String> communityMap = new HashMap<String, String>();

    protected static final CommunityService communityService
            = ContentServiceFactory.getInstance().getCommunityService();
    protected static final CollectionService collectionService
            = ContentServiceFactory.getInstance().getCollectionService();
    protected static final EPersonService ePersonService
            = EPersonServiceFactory.getInstance().getEPersonService();
    protected static final HandleService handleService
            = HandleServiceFactory.getInstance().getHandleService();
    protected static CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
    protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
    protected static EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();

    /**
     * Default constructor
@@ -127,384 +91,135 @@ public class StructBuilder {
    /**
     * Main method to be run from the command line to import a structure into
     * DSpacee or export existing structure to a file.The command is of the form:
     * DSpace
     *
     * <p>{@code StructBuilder -f [XML source] -e [administrator email] -o [output file]}
     * This is of the form:
     *
     * <p>to import, or
     * {@code StructBuilder -f [xml source] -e [administrator email] -o [output file]}
     *
     * <p>{@code StructBuilder -x -e [administrator email] -o [output file]}</p>
     * The output file will contain exactly the same as the source xml document, but
     * with the handle for each imported item added as an attribute.
     *
     * <p>to export. The output will contain exactly the same as the source XML
     * document, but with the Handle for each imported item added as an attribute.
     *
     *
     * @param argv command line arguments.
     * @throws ParserConfigurationException passed through.
     * @throws SQLException passed through.
     * @throws FileNotFoundException if input or output could not be opened.
     * @throws TransformerException if the input document is invalid.
     * @throws XPathExpressionException passed through.
     * @param argv the command line arguments given
     * @throws Exception if an error occurs
     */
    public static void main(String[] argv)
        throws ParserConfigurationException, SQLException,
        IOException, TransformerException, XPathExpressionException {
        // Define command line options.
        throws Exception {
        CommandLineParser parser = new PosixParser();

        Options options = new Options();

        options.addOption("h", "help", false, "Print this help message.");
        options.addOption("?", "help");
        options.addOption("x", "export", false, "Export the current structure as XML.");
        options.addOption("k", "keep-handles", false, "Apply Handles from input document.");
        options.addOption("f", "file", true, "file");
        options.addOption("e", "eperson", true, "eperson");
        options.addOption("o", "output", true, "output");

        options.addOption(Option.builder("e").longOpt("eperson")
                .desc("User who is manipulating the repository's structure.")
                .hasArg().argName("eperson").required().build());
        CommandLine line = parser.parse(options, argv);

        options.addOption(Option.builder("f").longOpt("file")
                .desc("File of new structure information.")
                .hasArg().argName("input").build());
        String file = null;
        String eperson = null;
        String output = null;

        options.addOption(Option.builder("o").longOpt("output")
                .desc("File to receive the structure map ('-' for standard out).")
                .hasArg().argName("output").required().build());

        // Parse the command line.
        CommandLineParser parser = new DefaultParser();
        CommandLine line = null;
        try {
            line = parser.parse(options, argv);
        } catch (ParseException ex) {
            System.err.println(ex.getMessage());
            usage(options);
            System.exit(1);
        if (line.hasOption('f')) {
            file = line.getOptionValue('f');
        }

        // If the user asked for help, give it and exit.
        if (line.hasOption('h') || line.hasOption('?')) {
            giveHelp(options);
        if (line.hasOption('e')) {
            eperson = line.getOptionValue('e');
        }

        if (line.hasOption('o')) {
            output = line.getOptionValue('o');
        }

        if (output == null || eperson == null || file == null) {
            usage();
            System.exit(0);
        }

        // Otherwise, analyze the command.
        // Must be import or export.
        if (!(line.hasOption('f') || line.hasOption('x'))) {
            giveHelp(options);
            System.exit(1);
        }

        // Open the output stream.
        String output = line.getOptionValue('o');
        OutputStream outputStream;
        if ("-".equals(output)) {
            outputStream = System.out;
        } else {
            outputStream = new FileOutputStream(output);
        }

        // create a context
        Context context = new Context();

        // set the context.
        String eperson = line.getOptionValue('e');
        try {
            context.setCurrentUser(ePersonService.findByEmail(context, eperson));
        } catch (SQLException ex) {
            System.err.format("That user could not be found: %s%n", ex.getMessage());
            System.exit(1);
        }

        // Export? Import?
        if (line.hasOption('x')) { // export
            exportStructure(context, outputStream);
            outputStream.close();
        } else { // Must be import
            String input = line.getOptionValue('f');
            if (null == input) {
                usage(options);
                System.exit(1);
            }

            InputStream inputStream;
            if ("-".equals(input)) {
                inputStream = System.in;
            } else {
                inputStream = new FileInputStream(input);
            }

            boolean keepHandles = options.hasOption("k");
            importStructure(context, inputStream, outputStream, keepHandles);

            inputStream.close();
            outputStream.close();

            // save changes from import
            context.complete();
        }
        System.exit(0);
    }

    /**
     * Import new Community/Collection structure.
     *
     * @param context
     * @param input XML which describes the new communities and collections.
     * @param output input, annotated with the new objects' identifiers.
     * @param keepHandles true if Handles should be set from input.
     * @throws IOException
     * @throws ParserConfigurationException
     * @throws SAXException
     * @throws TransformerException
     * @throws SQLException
     */
    static void importStructure(Context context, InputStream input,
                                OutputStream output, boolean keepHandles)
        throws IOException, ParserConfigurationException, SQLException,
        TransformerException, XPathExpressionException {
        // set the context
        context.setCurrentUser(ePersonService.findByEmail(context, eperson));

        // load the XML
        Document document = null;
        try {
            document = loadXML(input);
        } catch (IOException ex) {
            System.err.format("The input document could not be read: %s%n", ex.getMessage());
            System.exit(1);
        } catch (SAXException ex) {
            System.err.format("The input document could not be parsed: %s%n", ex.getMessage());
            System.exit(1);
        }
        Document document = loadXML(file);

        // run the preliminary validation, to be sure that the the XML document
        // is properly structured.
        try {
            validate(document);
        } catch (XPathExpressionException ex) {
            System.err.format("The input document is invalid: %s%n", ex.getMessage());
            System.exit(1);
        }

        // Check for 'identifier' attributes -- possibly output by this class.
        XPath xPath = XPathFactory.newInstance().newXPath();
        NodeList identifierNodes = (NodeList) xPath.compile("//*[@identifier]")
                                                   .evaluate(document, XPathConstants.NODESET);
        if (identifierNodes.getLength() > 0) {
            if (!keepHandles) {
                System.err.println("The input document has 'identifier' attributes, which will be ignored.");
            } else {
                for (int i = 0; i < identifierNodes.getLength() ; i++) {
                    String identifier = identifierNodes.item(i).getAttributes().item(0).getTextContent();
                    if (handleService.resolveToURL(context, identifier) != null) {
                        System.err.printf("The input document contains handle %s,"
                                + " which is in use already. Aborting...%n",
                            identifier);
                        System.exit(1);
                    }
                }
            }
        }
        // is properly structured
        validate(document);

        // load the mappings into the member variable hashmaps
        communityMap.put("name", MD_NAME);
        communityMap.put("description", MD_SHORT_DESCRIPTION);
        communityMap.put("intro", MD_INTRODUCTORY_TEXT);
        communityMap.put("copyright", MD_COPYRIGHT_TEXT);
        communityMap.put("sidebar", MD_SIDEBAR_TEXT);
        communityMap.put("name", "name");
        communityMap.put("description", "short_description");
        communityMap.put("intro", "introductory_text");
        communityMap.put("copyright", "copyright_text");
        communityMap.put("sidebar", "side_bar_text");

        collectionMap.put("name", MD_NAME);
        collectionMap.put("description", MD_SHORT_DESCRIPTION);
        collectionMap.put("intro", MD_INTRODUCTORY_TEXT);
        collectionMap.put("copyright", MD_COPYRIGHT_TEXT);
        collectionMap.put("sidebar", MD_SIDEBAR_TEXT);
        collectionMap.put("license", MD_LICENSE);
        collectionMap.put("provenance", MD_PROVENANCE_DESCRIPTION);
        collectionMap.put("name", "name");
        collectionMap.put("description", "short_description");
        collectionMap.put("intro", "introductory_text");
        collectionMap.put("copyright", "copyright_text");
        collectionMap.put("sidebar", "side_bar_text");
        collectionMap.put("license", "license");
        collectionMap.put("provenance", "provenance_description");

        Element[] elements = new Element[]{};
        try {
            // get the top level community list
            NodeList first = (NodeList) xPath.compile("/import_structure/community")
                                             .evaluate(document, XPathConstants.NODESET);
        // get the top level community list
        NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");

            // run the import starting with the top level communities
            elements = handleCommunities(context, first, null, keepHandles);
        } catch (TransformerException ex) {
            System.err.format("Input content not understood: %s%n", ex.getMessage());
            System.exit(1);
        } catch (AuthorizeException ex) {
            System.err.format("Not authorized: %s%n", ex.getMessage());
            System.exit(1);
        }
        // run the import starting with the top level communities
        Element[] elements = handleCommunities(context, first, null);

        // generate the output
        final Element root = new Element(RESULT_ROOT);

        for (Element element : elements) {
            root.addContent(element);
        Element root = xmlOutput.getRootElement();
        for (int i = 0; i < elements.length; i++) {
            root.addContent(elements[i]);
        }

        // finally write the string into the output file.
        final org.jdom2.Document xmlOutput = new org.jdom2.Document(root);
        // finally write the string into the output file
        try {
            new XMLOutputter().output(xmlOutput, output);
            BufferedWriter out = new BufferedWriter(new FileWriter(output));
            out.write(new XMLOutputter().outputString(xmlOutput));
            out.close();
        } catch (IOException e) {
            System.out.printf("Unable to write to output file %s: %s%n",
                output, e.getMessage());
            System.exit(1);
            System.out.println("Unable to write to output file " + output);
            System.exit(0);
        }

        context.complete();
    }

    /**
     * Add a single community, and its children, to the Document.
     *
     * @param community
     * @return a fragment representing this Community.
     * Output the usage information
     */
    private static Element exportACommunity(Community community) {
        // Export this Community.
        Element element = new Element("community");
        element.setAttribute("identifier", community.getHandle());
        element.addContent(new Element("name").setText(community.getName()));
        element.addContent(new Element("description")
            .setText(communityService.getMetadataFirstValue(community,
                MetadataSchemaEnum.DC.getName(), "description", "abstract", Item.ANY)));
        element.addContent(new Element("intro")
            .setText(communityService.getMetadataFirstValue(community,
                MetadataSchemaEnum.DC.getName(), "description", null, Item.ANY)));
        element.addContent(new Element("copyright")
            .setText(communityService.getMetadataFirstValue(community,
                MetadataSchemaEnum.DC.getName(), "rights", null, Item.ANY)));
        element.addContent(new Element("sidebar")
            .setText(communityService.getMetadataFirstValue(community,
                MetadataSchemaEnum.DC.getName(), "description", "tableofcontents", Item.ANY)));

        // Export this Community's Community children.
        for (Community subCommunity : community.getSubcommunities()) {
            element.addContent(exportACommunity(subCommunity));
        }

        // Export this Community's Collection children.
        for (Collection collection : community.getCollections()) {
            element.addContent(exportACollection(collection));
        }

        return element;
    private static void usage() {
        System.out.println("Usage: java StructBuilder -f <source XML file> -o <output file> -e <eperson email>");
        System.out.println(
            "Communities will be created from the top level, and a map of communities to handles will be returned in " +
                "the output file");
        return;
    }

    /**
     * Add a single Collection to the Document.
     *
     * @param collection
     * @return a fragment representing this Collection.
     */
    private static Element exportACollection(Collection collection) {
        // Export this Collection.
        Element element = new Element("collection");
        element.setAttribute("identifier", collection.getHandle());
        element.addContent(new Element("name").setText(collection.getName()));
        element.addContent(new Element("description")
            .setText(collectionService.getMetadataFirstValue(collection,
                MetadataSchemaEnum.DC.getName(), "description", "abstract", Item.ANY)));
        element.addContent(new Element("intro")
            .setText(collectionService.getMetadataFirstValue(collection,
                MetadataSchemaEnum.DC.getName(), "description", null, Item.ANY)));
        element.addContent(new Element("copyright")
            .setText(collectionService.getMetadataFirstValue(collection,
                MetadataSchemaEnum.DC.getName(), "rights", null, Item.ANY)));
        element.addContent(new Element("sidebar")
            .setText(collectionService.getMetadataFirstValue(collection,
                MetadataSchemaEnum.DC.getName(), "description", "tableofcontents", Item.ANY)));
        element.addContent(new Element("license")
            .setText(collectionService.getMetadataFirstValue(collection,
                MetadataSchemaEnum.DC.getName(), "rights", "license", Item.ANY)));
        // Provenance is special: multivalued
        for (MetadataValue value : collectionService.getMetadata(collection,
                MetadataSchemaEnum.DC.getName(), "provenance", null, Item.ANY)) {
            element.addContent(new Element("provenance")
                .setText(value.getValue()));
        }

        return element;
    }

    /**
     * Write out the existing Community/Collection structure.
     */
    static void exportStructure(Context context, OutputStream output) {
        // Build a document from the Community/Collection hierarchy.
        Element rootElement = new Element(INPUT_ROOT); // To be read by importStructure, perhaps

        List<Community> communities = null;
        try {
            communities = communityService.findAllTop(context);
        } catch (SQLException ex) {
            System.out.printf("Unable to get the list of top-level communities: %s%n",
                ex.getMessage());
            System.exit(1);
        }

        for (Community community : communities) {
            rootElement.addContent(exportACommunity(community));
        }

        // Now write the structure out.
        org.jdom2.Document xmlOutput = new org.jdom2.Document(rootElement);
        try {
            XMLOutputter outputter = new XMLOutputter(Format.getPrettyFormat());
            outputter.output(xmlOutput, output);
        } catch (IOException e) {
            System.out.printf("Unable to write to output file %s: %s%n",
                output, e.getMessage());
            System.exit(1);
        }
    }

    /**
     * Output the usage information.
     */
    private static void usage(Options options) {
        HelpFormatter helper = new HelpFormatter();
        try (PrintWriter writer = new PrintWriter(System.out);) {
            helper.printUsage(writer, 80/* FIXME Magic */,
                "structure-builder", options);
        }
    }

    /**
     * Help the user more.
     */
    private static void giveHelp(Options options) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("struct-builder",
            "Import or export Community/Collection structure.",
            options,
            "When importing (-f), communities will be created from the "
                + "top level, and a map of communities to handles will "
                + "be returned in the output file. When exporting (-x),"
                + "the current structure will be written to the map file.",
            true);
    }

    /**
     * Validate the XML document. This method returns if the document is valid.
     * If validation fails it generates an error and ceases execution.
     * Validate the XML document. This method does not return, but if validation
     * fails it generates an error and ceases execution
     *
     * @param document the XML document object
     * @throws TransformerException if transformer error
     */
    private static void validate(org.w3c.dom.Document document)
        throws XPathExpressionException {
        StringBuilder err = new StringBuilder();
        throws TransformerException {
        StringBuffer err = new StringBuffer();
        boolean trip = false;

        err.append("The following errors were encountered parsing the source XML.\n");
        err.append("No changes have been made to the DSpace instance.\n\n");
        err.append("The following errors were encountered parsing the source XML\n");
        err.append("No changes have been made to the DSpace instance\n\n");

        XPath xPath = XPathFactory.newInstance().newXPath();
        NodeList first = (NodeList) xPath.compile("/import_structure/community")
                                         .evaluate(document, XPathConstants.NODESET);
        NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
        if (first.getLength() == 0) {
            err.append("-There are no top level communities in the source document.");
            err.append("-There are no top level communities in the source document");
            System.out.println(err.toString());
            System.exit(1);
            System.exit(0);
        }

        String errs = validateCommunities(first, 1);
@@ -515,13 +230,13 @@ public class StructBuilder {

        if (trip) {
            System.out.println(err.toString());
            System.exit(1);
            System.exit(0);
        }
    }

    /**
     * Validate the communities section of the XML document. This returns a string
     * containing any errors encountered, or null if there were no errors.
     * containing any errors encountered, or null if there were no errors
     *
     * @param communities the NodeList of communities to validate
     * @param level the level in the XML document that we are at, for the purposes
@@ -530,25 +245,23 @@ public class StructBuilder {
     * no errors.
     */
    private static String validateCommunities(NodeList communities, int level)
        throws XPathExpressionException {
        StringBuilder err = new StringBuilder();
        throws TransformerException {
        StringBuffer err = new StringBuffer();
        boolean trip = false;
        String errs = null;
        XPath xPath = XPathFactory.newInstance().newXPath();

        for (int i = 0; i < communities.getLength(); i++) {
            Node n = communities.item(i);
            NodeList name = (NodeList) xPath.compile("name").evaluate(n, XPathConstants.NODESET);
            NodeList name = XPathAPI.selectNodeList(n, "name");
            if (name.getLength() != 1) {
                String pos = Integer.toString(i + 1);
                err.append("-The level ").append(level)
                   .append(" community in position ").append(pos)
                   .append(" does not contain exactly one name field.\n");
                err.append("-The level " + level + " community in position " + pos);
                err.append(" does not contain exactly one name field\n");
                trip = true;
            }

            // validate sub communities
            NodeList subCommunities = (NodeList) xPath.compile("community").evaluate(n, XPathConstants.NODESET);
            NodeList subCommunities = XPathAPI.selectNodeList(n, "community");
            String comErrs = validateCommunities(subCommunities, level + 1);
            if (comErrs != null) {
                err.append(comErrs);
@@ -556,7 +269,7 @@ public class StructBuilder {
            }

            // validate collections
            NodeList collections = (NodeList) xPath.compile("collection").evaluate(n, XPathConstants.NODESET);
            NodeList collections = XPathAPI.selectNodeList(n, "collection");
            String colErrs = validateCollections(collections, level + 1);
            if (colErrs != null) {
                err.append(colErrs);
@@ -573,27 +286,25 @@ public class StructBuilder {

    /**
     * validate the collection section of the XML document. This generates a
     * string containing any errors encountered, or returns null if no errors.
     * string containing any errors encountered, or returns null if no errors
     *
     * @param collections a NodeList of collections to validate
     * @param level the level in the XML document for the purposes of error reporting
     * @return the errors to be generated by the calling method, or null if none
     */
    private static String validateCollections(NodeList collections, int level)
        throws XPathExpressionException {
        StringBuilder err = new StringBuilder();
        throws TransformerException {
        StringBuffer err = new StringBuffer();
        boolean trip = false;
        String errs = null;
        XPath xPath = XPathFactory.newInstance().newXPath();

        for (int i = 0; i < collections.getLength(); i++) {
            Node n = collections.item(i);
            NodeList name = (NodeList) xPath.compile("name").evaluate(n, XPathConstants.NODESET);
            NodeList name = XPathAPI.selectNodeList(n, "name");
            if (name.getLength() != 1) {
                String pos = Integer.toString(i + 1);
                err.append("-The level ").append(level)
                   .append(" collection in position ").append(pos)
                   .append(" does not contain exactly one name field.\n");
                err.append("-The level " + level + " collection in position " + pos);
                err.append(" does not contain exactly one name field\n");
                trip = true;
            }
        }
@@ -606,17 +317,17 @@ public class StructBuilder {
    }

    /**
     * Load the XML document from input.
     * Load in the XML from file.
     *
     * @param input the filename to load from.
     * @return the DOM representation of the XML input.
     * @param filename the filename to load from
     * @return the DOM representation of the XML file
     */
    private static org.w3c.dom.Document loadXML(InputStream input)
    private static org.w3c.dom.Document loadXML(String filename)
        throws IOException, ParserConfigurationException, SAXException {
        DocumentBuilder builder = DocumentBuilderFactory.newInstance()
                                                        .newDocumentBuilder();

        org.w3c.dom.Document document = builder.parse(input);
        org.w3c.dom.Document document = builder.parse(new File(filename));

        return document;
    }
@@ -627,7 +338,7 @@ public class StructBuilder {
     * @param node the node from which we want to extract the string value
     * @return the string value of the node
     */
    private static String getStringValue(Node node) {
    public static String getStringValue(Node node) {
        String value = node.getNodeValue();

        if (node.hasChildNodes()) {
@@ -648,49 +359,43 @@ public class StructBuilder {
     * @param context     the context of the request
     * @param communities a nodelist of communities to create along with their sub-structures
     * @param parent      the parent community of the nodelist of communities to create
     * @param keepHandles use Handles from input.
     * @return an element array containing additional information regarding the
     * created communities (e.g. the handles they have been assigned)
     */
    private static Element[] handleCommunities(Context context, NodeList communities,
                                               Community parent, boolean keepHandles)
        throws TransformerException, SQLException, AuthorizeException,
        XPathExpressionException {
    private static Element[] handleCommunities(Context context, NodeList communities, Community parent)
        throws TransformerException, SQLException, Exception {
        Element[] elements = new Element[communities.getLength()];
        XPath xPath = XPathFactory.newInstance().newXPath();

        for (int i = 0; i < communities.getLength(); i++) {
            Node tn = communities.item(i);
            Node identifier = tn.getAttributes().getNamedItem("identifier");
            Community community;
            Element element = new Element("community");

            // create the community or sub community
            Community community;
            if (null == identifier
                    || StringUtils.isBlank(identifier.getNodeValue())
                    || !keepHandles) {
            if (parent != null) {
                community = communityService.create(parent, context);
            } else {
                community = communityService.create(parent, context, identifier.getNodeValue());
                community = communityService.create(null, context);
            }

            // default the short description to be an empty string
            communityService.setMetadataSingleValue(context, community,
                MD_SHORT_DESCRIPTION, null, " ");
            communityService.setMetadata(context, community, "short_description", " ");

            // now update the metadata
            for (Map.Entry<String, MetadataFieldName> entry : communityMap.entrySet()) {
                NodeList nl = (NodeList) xPath.compile(entry.getKey()).evaluate(tn, XPathConstants.NODESET);
            Node tn = communities.item(i);
            for (Map.Entry<String, String> entry : communityMap.entrySet()) {
                NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
                if (nl.getLength() == 1) {
                    communityService.setMetadataSingleValue(context, community,
                        entry.getValue(), null, getStringValue(nl.item(0)));
                    communityService.setMetadata(context, community, entry.getValue(), getStringValue(nl.item(0)));
                }
            }

            // FIXME: at the moment, if the community already exists by name
            // then this will throw an SQLException on a duplicate key
            // violation.
            // Ideally we'd skip this row and continue to create sub communities
            // and so forth where they don't exist, but it's proving difficult
            // then this will throw a PSQLException on a duplicate key
            // violation
            // Ideally we'd skip this row and continue to create sub
            // communities
            // and so forth where they don't exist, but it's proving
            // difficult
            // to isolate the community that already exists without hitting
            // the database directly.
            communityService.update(context, community);
@@ -703,59 +408,43 @@ public class StructBuilder {
            // but it's here to keep it separate from the create process in
            // case
            // we want to move it or make it switchable later
            Element element = new Element("community");
            element.setAttribute("identifier", community.getHandle());

            Element nameElement = new Element("name");
            nameElement.setText(communityService.getMetadataFirstValue(
                community, CommunityService.MD_NAME, Item.ANY));
            nameElement.setText(communityService.getMetadata(community, "name"));
            element.addContent(nameElement);

            String fieldValue;

            fieldValue = communityService.getMetadataFirstValue(community,
                CommunityService.MD_SHORT_DESCRIPTION, Item.ANY);
            if (fieldValue != null) {
            if (communityService.getMetadata(community, "short_description") != null) {
                Element descriptionElement = new Element("description");
                descriptionElement.setText(fieldValue);
                descriptionElement.setText(communityService.getMetadata(community, "short_description"));
                element.addContent(descriptionElement);
            }

            fieldValue = communityService.getMetadataFirstValue(community,
                CommunityService.MD_INTRODUCTORY_TEXT, Item.ANY);
            if (fieldValue != null) {
            if (communityService.getMetadata(community, "introductory_text") != null) {
                Element introElement = new Element("intro");
                introElement.setText(fieldValue);
                introElement.setText(communityService.getMetadata(community, "introductory_text"));
                element.addContent(introElement);
            }

            fieldValue = communityService.getMetadataFirstValue(community,
                CommunityService.MD_COPYRIGHT_TEXT, Item.ANY);
            if (fieldValue != null) {
            if (communityService.getMetadata(community, "copyright_text") != null) {
                Element copyrightElement = new Element("copyright");
                copyrightElement.setText(fieldValue);
                copyrightElement.setText(communityService.getMetadata(community, "copyright_text"));
                element.addContent(copyrightElement);
            }

            fieldValue = communityService.getMetadataFirstValue(community,
                CommunityService.MD_SIDEBAR_TEXT, Item.ANY);
            if (fieldValue != null) {
            if (communityService.getMetadata(community, "side_bar_text") != null) {
                Element sidebarElement = new Element("sidebar");
                sidebarElement.setText(fieldValue);
                sidebarElement.setText(communityService.getMetadata(community, "side_bar_text"));
                element.addContent(sidebarElement);
            }

            // handle sub communities
            NodeList subCommunities = (NodeList) xPath.compile("community")
                                                      .evaluate(tn, XPathConstants.NODESET);
            Element[] subCommunityElements = handleCommunities(context,
                subCommunities, community, keepHandles);
            NodeList subCommunities = XPathAPI.selectNodeList(tn, "community");
            Element[] subCommunityElements = handleCommunities(context, subCommunities, community);

            // handle collections
            NodeList collections = (NodeList) xPath.compile("collection")
                                                   .evaluate(tn, XPathConstants.NODESET);
            Element[] collectionElements = handleCollections(context,
                collections, community, keepHandles);
            NodeList collections = XPathAPI.selectNodeList(tn, "collection");
            Element[] collectionElements = handleCollections(context, collections, community);

            int j;
            for (j = 0; j < subCommunityElements.length; j++) {
@@ -780,96 +469,67 @@ public class StructBuilder {
|
||||
* @return an Element array containing additional information about the
|
||||
* created collections (e.g. the handle)
|
||||
*/
|
||||
private static Element[] handleCollections(Context context,
|
||||
NodeList collections, Community parent, boolean keepHandles)
|
||||
throws SQLException, AuthorizeException, XPathExpressionException {
|
||||
private static Element[] handleCollections(Context context, NodeList collections, Community parent)
|
||||
throws TransformerException, SQLException, AuthorizeException, IOException, Exception {
|
||||
Element[] elements = new Element[collections.getLength()];
|
||||
XPath xPath = XPathFactory.newInstance().newXPath();
|
||||
|
||||
for (int i = 0; i < collections.getLength(); i++) {
|
||||
Node tn = collections.item(i);
|
||||
Node identifier = tn.getAttributes().getNamedItem("identifier");
|
||||
|
||||
// Create the Collection.
|
||||
Collection collection;
|
||||
if (null == identifier
|
||||
|| StringUtils.isBlank(identifier.getNodeValue())
|
||||
|| !keepHandles) {
|
||||
collection = collectionService.create(context, parent);
} else {
collection = collectionService.create(context, parent, identifier.getNodeValue());
}
Element element = new Element("collection");
Collection collection = collectionService.create(context, parent);

// default the short description to the empty string
collectionService.setMetadataSingleValue(context, collection,
MD_SHORT_DESCRIPTION, Item.ANY, " ");
collectionService.setMetadata(context, collection, "short_description", " ");

// import the rest of the metadata
for (Map.Entry<String, MetadataFieldName> entry : collectionMap.entrySet()) {
NodeList nl = (NodeList) xPath.compile(entry.getKey()).evaluate(tn, XPathConstants.NODESET);
Node tn = collections.item(i);
for (Map.Entry<String, String> entry : collectionMap.entrySet()) {
NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
if (nl.getLength() == 1) {
collectionService.setMetadataSingleValue(context, collection,
entry.getValue(), null, getStringValue(nl.item(0)));
collectionService.setMetadata(context, collection, entry.getValue(), getStringValue(nl.item(0)));
}
}

collectionService.update(context, collection);

Element element = new Element("collection");
element.setAttribute("identifier", collection.getHandle());

Element nameElement = new Element("name");
nameElement.setText(collectionService.getMetadataFirstValue(collection,
CollectionService.MD_NAME, Item.ANY));
nameElement.setText(collectionService.getMetadata(collection, "name"));
element.addContent(nameElement);

String fieldValue;

fieldValue = collectionService.getMetadataFirstValue(collection,
CollectionService.MD_SHORT_DESCRIPTION, Item.ANY);
if (fieldValue != null) {
if (collectionService.getMetadata(collection, "short_description") != null) {
Element descriptionElement = new Element("description");
descriptionElement.setText(fieldValue);
descriptionElement.setText(collectionService.getMetadata(collection, "short_description"));
element.addContent(descriptionElement);
}

fieldValue = collectionService.getMetadataFirstValue(collection,
CollectionService.MD_INTRODUCTORY_TEXT, Item.ANY);
if (fieldValue != null) {
if (collectionService.getMetadata(collection, "introductory_text") != null) {
Element introElement = new Element("intro");
introElement.setText(fieldValue);
introElement.setText(collectionService.getMetadata(collection, "introductory_text"));
element.addContent(introElement);
}

fieldValue = collectionService.getMetadataFirstValue(collection,
CollectionService.MD_COPYRIGHT_TEXT, Item.ANY);
if (fieldValue != null) {
if (collectionService.getMetadata(collection, "copyright_text") != null) {
Element copyrightElement = new Element("copyright");
copyrightElement.setText(fieldValue);
copyrightElement.setText(collectionService.getMetadata(collection, "copyright_text"));
element.addContent(copyrightElement);
}

fieldValue = collectionService.getMetadataFirstValue(collection,
CollectionService.MD_SIDEBAR_TEXT, Item.ANY);
if (fieldValue != null) {
if (collectionService.getMetadata(collection, "side_bar_text") != null) {
Element sidebarElement = new Element("sidebar");
sidebarElement.setText(fieldValue);
sidebarElement.setText(collectionService.getMetadata(collection, "side_bar_text"));
element.addContent(sidebarElement);
}

fieldValue = collectionService.getMetadataFirstValue(collection,
CollectionService.MD_LICENSE, Item.ANY);
if (fieldValue != null) {
if (collectionService.getMetadata(collection, "license") != null) {
Element sidebarElement = new Element("license");
sidebarElement.setText(fieldValue);
sidebarElement.setText(collectionService.getMetadata(collection, "license"));
element.addContent(sidebarElement);
}

fieldValue = collectionService.getMetadataFirstValue(collection,
CollectionService.MD_PROVENANCE_DESCRIPTION, Item.ANY);
if (fieldValue != null) {
if (collectionService.getMetadata(collection, "provenance_description") != null) {
Element sidebarElement = new Element("provenance");
sidebarElement.setText(fieldValue);
sidebarElement.setText(collectionService.getMetadata(collection, "provenance_description"));
element.addContent(sidebarElement);
}

@@ -878,4 +538,5 @@ public class StructBuilder {

return elements;
}

}
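A note for orientation: throughout this StructBuilder hunk, the paired lines contrast the typed metadata API on the dspace-7.4 side with the older string-keyed API that the rest-tutor branch falls back to. A minimal sketch of the two generations, assuming an initialized context, collection, collectionService and the MD_SHORT_DESCRIPTION constant as used in the hunk above:

    // dspace-7.4 side: typed field reference, explicit language, single value
    collectionService.setMetadataSingleValue(context, collection, MD_SHORT_DESCRIPTION, Item.ANY, " ");
    String desc = collectionService.getMetadataFirstValue(collection, CollectionService.MD_SHORT_DESCRIPTION, Item.ANY);
    // rest-tutor side: legacy convenience methods keyed by the bare element name
    collectionService.setMetadata(context, collection, "short_description", " ");
    String descOld = collectionService.getMetadata(collection, "short_description");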
@@ -19,7 +19,6 @@ import org.dspace.content.Item;
* @author Stuart Lewis
*/
public class BulkEditChange {

/**
* The item these changes relate to
*/

@@ -8,10 +8,14 @@
package org.dspace.app.bulkedit;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -23,8 +27,6 @@ import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityValueService;
@@ -32,7 +34,6 @@ import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
import org.dspace.content.authority.Choices;
import org.dspace.content.factory.ContentServiceFactory;
@@ -138,18 +139,18 @@ public class DSpaceCSV implements Serializable {
/**
* Create a new instance, reading the lines in from file
*
* @param inputStream the input stream to read from
* @param f The file to read from
* @param c The DSpace Context
* @throws Exception thrown if there is an error reading or processing the file
*/
public DSpaceCSV(InputStream inputStream, Context c) throws Exception {
public DSpaceCSV(File f, Context c) throws Exception {
// Initialise the class
init();

// Open the CSV file
BufferedReader input = null;
try {
input = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
input = new BufferedReader(new InputStreamReader(new FileInputStream(f), "UTF-8"));

// Read the heading line
String head = input.readLine();
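The constructor change above swaps a File parameter for an InputStream, and with it the magic "UTF-8" string for StandardCharsets.UTF_8. A minimal caller sketch of both forms, assuming an open Context c; the path "metadata.csv" is illustrative only:

    // dspace-7.4 side: stream-based, the caller controls where the bytes come from
    DSpaceCSV csv = new DSpaceCSV(new FileInputStream(new File("metadata.csv")), c);
    // rest-tutor side: file-based convenience constructor
    DSpaceCSV csvOld = new DSpaceCSV(new File("metadata.csv"), c);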
@@ -159,7 +160,7 @@ public class DSpaceCSV implements Serializable {
columnCounter++;

// Remove surrounding quotes if there are any
if (element.startsWith("\"") && element.endsWith("\"")) {
if ((element.startsWith("\"")) && (element.endsWith("\""))) {
element = element.substring(1, element.length() - 1);
}

@@ -167,22 +168,16 @@
if ("collection".equals(element)) {
// Store the heading
headings.add(element);
} else if ("rowName".equals(element)) {
// Store the heading
headings.add(element);
} else if ("action".equals(element)) { // Store the action
// Store the heading
headings.add(element);
} else if (!"id".equals(element)) {
String authorityPrefix = "";
if (StringUtils.startsWith(element, "[authority]")) {
element = StringUtils.substringAfter(element, "[authority]");
AuthorityValue authorityValueType = authorityValueService.getAuthorityValueType(element);
if (authorityValueType != null) {
String authorityType = authorityValueType.getAuthorityType();
authorityPrefix = element.substring(0, authorityType.length() + 1);
element = element.substring(authorityPrefix.length());
}
AuthorityValue authorityValueType = authorityValueService.getAuthorityValueType(element);
if (authorityValueType != null) {
String authorityType = authorityValueType.getAuthorityType();
authorityPrefix = element.substring(0, authorityType.length() + 1);
element = element.substring(authorityPrefix.length());
}

// Verify that the heading is valid in the metadata registry
@@ -203,24 +198,20 @@ public class DSpaceCSV implements Serializable {
}

// Check that the scheme exists
if (!StringUtils.equals(metadataSchema, MetadataSchemaEnum.RELATION.getName())) {
MetadataSchema foundSchema = metadataSchemaService.find(c, metadataSchema);
if (foundSchema == null) {
throw new MetadataImportInvalidHeadingException(clean[0],
MetadataImportInvalidHeadingException
.SCHEMA,
columnCounter);
}
MetadataSchema foundSchema = metadataSchemaService.find(c, metadataSchema);
if (foundSchema == null) {
throw new MetadataImportInvalidHeadingException(clean[0],
MetadataImportInvalidHeadingException.SCHEMA,
columnCounter);
}

// Check that the metadata element exists in the schema
MetadataField foundField = metadataFieldService
.findByElement(c, foundSchema, metadataElement, metadataQualifier);
if (foundField == null) {
throw new MetadataImportInvalidHeadingException(clean[0],
MetadataImportInvalidHeadingException
.ELEMENT,
columnCounter);
}
// Check that the metadata element exists in the schema
MetadataField foundField = metadataFieldService
.findByElement(c, foundSchema, metadataElement, metadataQualifier);
if (foundField == null) {
throw new MetadataImportInvalidHeadingException(clean[0],
MetadataImportInvalidHeadingException.ELEMENT,
columnCounter);
}

// Store the heading
@@ -306,7 +297,7 @@ public class DSpaceCSV implements Serializable {
// Specify default values
String[] defaultValues =
new String[] {
"dc.date.accessioned", "dc.date.available", "dc.date.updated", "dc.description.provenance"
"dc.date.accessioned, dc.date.available, dc.date.updated, dc.description.provenance"
};
String[] toIgnoreArray =
DSpaceServicesFactory.getInstance()
@@ -337,15 +328,15 @@ public class DSpaceCSV implements Serializable {
/**
* Set the value separator for multiple values stored in one csv value.
*
* Is set in {@code bulkedit.cfg} as {@code valueseparator}.
* Is set in bulkedit.cfg as valueseparator
*
* If not set, defaults to double pipe '||'.
* If not set, defaults to double pipe '||'
*/
private void setValueSeparator() {
// Get the value separator
valueSeparator = DSpaceServicesFactory.getInstance().getConfigurationService()
.getProperty("bulkedit.valueseparator");
if ((valueSeparator != null) && !valueSeparator.trim().isEmpty()) {
if ((valueSeparator != null) && (!"".equals(valueSeparator.trim()))) {
valueSeparator = valueSeparator.trim();
} else {
valueSeparator = "||";
@@ -360,7 +351,7 @@ public class DSpaceCSV implements Serializable {
/**
* Set the field separator use to separate fields in the csv.
*
* Is set in {@code bulkedit.cfg} as {@code fieldseparator}.
* Is set in bulkedit.cfg as fieldseparator
*
* If not set, defaults to comma ','.
*
@@ -371,7 +362,7 @@ public class DSpaceCSV implements Serializable {
// Get the value separator
fieldSeparator = DSpaceServicesFactory.getInstance().getConfigurationService()
.getProperty("bulkedit.fieldseparator");
if ((fieldSeparator != null) && !fieldSeparator.trim().isEmpty()) {
if ((fieldSeparator != null) && (!"".equals(fieldSeparator.trim()))) {
fieldSeparator = fieldSeparator.trim();
if ("tab".equals(fieldSeparator)) {
fieldSeparator = "\t";
@@ -395,15 +386,15 @@ public class DSpaceCSV implements Serializable {
/**
* Set the authority separator for value with authority data.
*
* Is set in {@code dspace.cfg} as {@code bulkedit.authorityseparator}.
* Is set in dspace.cfg as bulkedit.authorityseparator
*
* If not set, defaults to double colon '::'.
* If not set, defaults to double colon '::'
*/
private void setAuthoritySeparator() {
// Get the value separator
authoritySeparator = DSpaceServicesFactory.getInstance().getConfigurationService()
.getProperty("bulkedit.authorityseparator");
if ((authoritySeparator != null) && !authoritySeparator.trim().isEmpty()) {
if ((authoritySeparator != null) && (!"".equals(authoritySeparator.trim()))) {
authoritySeparator = authoritySeparator.trim();
} else {
authoritySeparator = "::";
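The three setters above each read one separator from configuration with a hard-coded fallback. Collected as a bulkedit.cfg sketch, the keys come straight from the getProperty calls shown, and the values are the documented defaults:

    # Separator between multiple values stored in one CSV cell (default ||)
    bulkedit.valueseparator = ||
    # Separator between CSV fields; the literal word "tab" selects a tab character (default ,)
    bulkedit.fieldseparator = ,
    # Separator between a value and its authority data (default ::)
    bulkedit.authorityseparator = ::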
@@ -508,7 +499,7 @@ public class DSpaceCSV implements Serializable {
int i = 0;
for (String part : bits) {
int bitcounter = part.length() - part.replaceAll("\"", "").length();
if (part.startsWith("\"") && (!part.endsWith("\"") || ((bitcounter & 1) == 1))) {
if ((part.startsWith("\"")) && ((!part.endsWith("\"")) || ((bitcounter & 1) == 1))) {
found = true;
String add = bits.get(i) + fieldSeparator + bits.get(i + 1);
bits.remove(i);
@@ -524,7 +515,7 @@ public class DSpaceCSV implements Serializable {
// Deal with quotes around the elements
int i = 0;
for (String part : bits) {
if (part.startsWith("\"") && part.endsWith("\"")) {
if ((part.startsWith("\"")) && (part.endsWith("\""))) {
part = part.substring(1, part.length() - 1);
bits.set(i, part);
}
@@ -564,7 +555,7 @@ public class DSpaceCSV implements Serializable {
for (String part : bits) {
if (i > 0) {
// Is this a last empty item?
if (last && (i == headings.size())) {
if ((last) && (i == headings.size())) {
part = "";
}

@@ -577,7 +568,7 @@ public class DSpaceCSV implements Serializable {
csvLine.add(headings.get(i - 1), null);
String[] elements = part.split(escapedValueSeparator);
for (String element : elements) {
if ((element != null) && !element.isEmpty()) {
if ((element != null) && (!"".equals(element))) {
csvLine.add(headings.get(i - 1), element);
}
}
@@ -623,24 +614,30 @@ public class DSpaceCSV implements Serializable {
}

/**
* Creates and returns an InputStream from the CSV Lines in this DSpaceCSV
* @return The InputStream created from the CSVLines in this DSpaceCSV
* Save the CSV file to the given filename
*
* @param filename The filename to save the CSV file to
* @throws IOException Thrown if an error occurs when writing the file
*/
public InputStream getInputStream() {
StringBuilder stringBuilder = new StringBuilder();
public final void save(String filename) throws IOException {
// Save the file
BufferedWriter out = new BufferedWriter(
new OutputStreamWriter(
new FileOutputStream(filename), "UTF-8"));
for (String csvLine : getCSVLinesAsStringArray()) {
stringBuilder.append(csvLine).append("\n");
out.write(csvLine + "\n");
}
return IOUtils.toInputStream(stringBuilder.toString(), StandardCharsets.UTF_8);
out.flush();
out.close();
}

/**
* Is it okay to export this value? When exportAll is set to false, we don't export
* Is it Ok to export this value? When exportAll is set to false, we don't export
* some of the metadata elements.
*
* The list can be configured via the key ignore-on-export in {@code bulkedit.cfg}.
* The list can be configured via the key ignore-on-export in bulkedit.cfg
*
* @param md The MetadataField to examine
* @param md The Metadatum to examine
* @return Whether or not it is OK to export this element
*/
protected boolean okToExport(MetadataField md) {
@@ -649,8 +646,12 @@ public class DSpaceCSV implements Serializable {
if (md.getQualifier() != null) {
key += "." + md.getQualifier();
}
if (ignore.get(key) != null) {
return false;
}

// Must be OK, so don't ignore
return ignore.get(key) == null;
return true;
}

/**
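The hunk above replaces the file-writing save(String) with an in-memory getInputStream(), leaving the output destination to the caller. A minimal sketch of the newer style, assuming a populated DSpaceCSV named csv and the java.nio.file classes on the classpath; the target path "export.csv" is illustrative:

    // Serialize the CSV lines to a stream and copy them wherever needed
    try (InputStream in = csv.getInputStream()) {
        Files.copy(in, Paths.get("export.csv"), StandardCopyOption.REPLACE_EXISTING);
    }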
@@ -1,115 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;

import java.sql.SQLException;

import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.ArrayUtils;
import org.dspace.content.MetadataField;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataValueService;
import org.dspace.core.Context;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.utils.DSpace;

/**
* {@link DSpaceRunnable} implementation to delete all the values of the given
* metadata field.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class MetadataDeletion extends DSpaceRunnable<MetadataDeletionScriptConfiguration<MetadataDeletion>> {

private MetadataValueService metadataValueService;

private MetadataFieldService metadataFieldService;

private ConfigurationService configurationService;

private String metadataField;

private boolean list;

@Override
public void internalRun() throws Exception {

if (list) {
listErasableMetadata();
return;
}

Context context = new Context();

try {
context.turnOffAuthorisationSystem();
performMetadataValuesDeletion(context);
} finally {
context.restoreAuthSystemState();
context.complete();
}

}

private void listErasableMetadata() {
String[] erasableMetadata = getErasableMetadata();
if (ArrayUtils.isEmpty(erasableMetadata)) {
handler.logInfo("No fields has been configured to be cleared via bulk deletion");
} else {
handler.logInfo("The fields that can be bulk deleted are: " + String.join(", ", erasableMetadata));
}
}

private void performMetadataValuesDeletion(Context context) throws SQLException {

MetadataField field = metadataFieldService.findByString(context, metadataField, '.');
if (field == null) {
throw new IllegalArgumentException("No metadata field found with name " + metadataField);
}

if (!ArrayUtils.contains(getErasableMetadata(), metadataField)) {
throw new IllegalArgumentException("The given metadata field cannot be bulk deleted");
}

handler.logInfo(String.format("Deleting the field '%s' from all objects", metadataField));

metadataValueService.deleteByMetadataField(context, field);
}

private String[] getErasableMetadata() {
return configurationService.getArrayProperty("bulkedit.allow-bulk-deletion");
}

@Override
@SuppressWarnings("unchecked")
public MetadataDeletionScriptConfiguration<MetadataDeletion> getScriptConfiguration() {
return new DSpace().getServiceManager()
.getServiceByName("metadata-deletion", MetadataDeletionScriptConfiguration.class);
}

@Override
public void setup() throws ParseException {

metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService();
metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();
configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();

metadataField = commandLine.getOptionValue('m');
list = commandLine.hasOption('l');

if (!list && metadataField == null) {
throw new ParseException("One of the following parameters is required: -m or -l");
}

}

}
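MetadataDeletion only erases fields whitelisted under the bulkedit.allow-bulk-deletion key read by getArrayProperty above; -l lists that whitelist and -m names the field to clear. A configuration sketch; the key is taken from the code, but the field names shown are illustrative examples, not shipped defaults:

    # Comma-separated metadata fields that the metadata-deletion script may clear
    bulkedit.allow-bulk-deletion = dc.description.provenance, dc.rights.license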
@@ -1,18 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;

/**
* The {@link MetadataDeletion} for CLI.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class MetadataDeletionCli extends MetadataDeletion {

}

@@ -1,18 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;

/**
* Script configuration for {@link MetadataDeletionCli}.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class MetadataDeletionCliScriptConfiguration extends MetadataDeletionScriptConfiguration<MetadataDeletionCli> {

}

@@ -1,66 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;

import java.sql.SQLException;

import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
* The {@link ScriptConfiguration} for the {@link MetadataDeletion} script.
*/
public class MetadataDeletionScriptConfiguration<T extends MetadataDeletion> extends ScriptConfiguration<T> {

@Autowired
private AuthorizeService authorizeService;

private Class<T> dspaceRunnableClass;

@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}

@Override
public Options getOptions() {
if (options == null) {

Options options = new Options();

options.addOption("m", "metadata", true, "metadata field name");

options.addOption("l", "list", false, "lists the metadata fields that can be deleted");

super.options = options;
}
return options;
}

@Override
public Class<T> getDspaceRunnableClass() {
return dspaceRunnableClass;
}

/**
* Generic setter for the dspaceRunnableClass
* @param dspaceRunnableClass The dspaceRunnableClass to be set on this MetadataDeletionScriptConfiguration
*/
@Override
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
this.dspaceRunnableClass = dspaceRunnableClass;
}

}
@@ -8,115 +8,271 @@
package org.dspace.app.bulkedit;

import java.sql.SQLException;
import java.util.UUID;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import com.google.common.collect.Iterators;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.util.factory.UtilServiceFactory;
import org.dspace.app.util.service.DSpaceObjectUtils;
import org.apache.commons.cli.PosixParser;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataDSpaceCsvExportService;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.utils.DSpace;

/**
* Metadata exporter to allow the batch export of metadata into a file
*
* @author Stuart Lewis
*/
public class MetadataExport extends DSpaceRunnable<MetadataExportScriptConfiguration> {
public class MetadataExport {
/**
* The items to export
*/
protected Iterator<Item> toExport;

private boolean help = false;
private String filename = null;
private String identifier = null;
private boolean exportAllMetadata = false;
private boolean exportAllItems = false;
protected ItemService itemService;

private static final String EXPORT_CSV = "exportCSV";
protected Context context;

private MetadataDSpaceCsvExportService metadataDSpaceCsvExportService = new DSpace().getServiceManager()
.getServicesByType(MetadataDSpaceCsvExportService.class).get(0);
/**
* Whether to export all metadata, or just normally edited metadata
*/
protected boolean exportAll;

private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
protected MetadataExport() {
itemService = ContentServiceFactory.getInstance().getItemService();
}

private DSpaceObjectUtils dSpaceObjectUtils = UtilServiceFactory.getInstance().getDSpaceObjectUtils();
/**
* Set up a new metadata export
*
* @param c The Context
* @param toExport The ItemIterator of items to export
* @param exportAll whether to export all metadata or not (include handle, provenance etc)
*/
public MetadataExport(Context c, Iterator<Item> toExport, boolean exportAll) {
itemService = ContentServiceFactory.getInstance().getItemService();

@Override
public void internalRun() throws Exception {
// Store the export settings
this.toExport = toExport;
this.exportAll = exportAll;
this.context = c;
}

/**
* Method to export a community (and sub-communities and collections)
*
* @param c The Context
* @param toExport The Community to export
* @param exportAll whether to export all metadata or not (include handle, provenance etc)
*/
public MetadataExport(Context c, Community toExport, boolean exportAll) {
itemService = ContentServiceFactory.getInstance().getItemService();

if (help) {
logHelpInfo();
printHelp();
return;
}
Context context = new Context();
context.turnOffAuthorisationSystem();
try {
context.setCurrentUser(ePersonService.find(context, this.getEpersonIdentifier()));
} catch (SQLException e) {
handler.handleException(e);
// Try to export the community
this.toExport = buildFromCommunity(c, toExport, 0);
this.exportAll = exportAll;
this.context = c;
} catch (SQLException sqle) {
// Something went wrong...
System.err.println("Error running exporter:");
sqle.printStackTrace(System.err);
System.exit(1);
}
DSpaceCSV dSpaceCSV = metadataDSpaceCsvExportService
.handleExport(context, exportAllItems, exportAllMetadata, identifier,
handler);
handler.writeFilestream(context, filename, dSpaceCSV.getInputStream(), EXPORT_CSV);
context.restoreAuthSystemState();
context.complete();
}

protected void logHelpInfo() {
handler.logInfo("\nfull export: metadata-export");
handler.logInfo("partial export: metadata-export -i handle/UUID");
}

@Override
public MetadataExportScriptConfiguration getScriptConfiguration() {
return new DSpace().getServiceManager().getServiceByName("metadata-export",
MetadataExportScriptConfiguration.class);
}

@Override
public void setup() throws ParseException {

if (commandLine.hasOption('h')) {
help = true;
return;
}

if (!commandLine.hasOption('i')) {
exportAllItems = true;
}
identifier = commandLine.getOptionValue('i');
filename = getFileNameForExportFile();

exportAllMetadata = commandLine.hasOption('a');

}

protected String getFileNameForExportFile() throws ParseException {
Context context = new Context();
try {
DSpaceObject dso = null;
if (StringUtils.isNotBlank(identifier)) {
dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, identifier);
if (dso == null) {
dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(identifier));
}
} else {
dso = ContentServiceFactory.getInstance().getSiteService().findSite(context);
/**
* Build an array list of item ids that are in a community (include sub-communities and collections)
*
* @param context DSpace context
* @param community The community to build from
* @param indent How many spaces to use when writing out the names of items added
* @return The list of item ids
* @throws SQLException if database error
*/
protected Iterator<Item> buildFromCommunity(Context context, Community community, int indent)
throws SQLException {
// Add all the collections
List<Collection> collections = community.getCollections();
Iterator<Item> result = null;
for (Collection collection : collections) {
for (int i = 0; i < indent; i++) {
System.out.print(" ");
}

Iterator<Item> items = itemService.findByCollection(context, collection);
result = addItemsToResult(result, items);

}
// Add all the sub-communities
List<Community> communities = community.getSubcommunities();
for (Community subCommunity : communities) {
for (int i = 0; i < indent; i++) {
System.out.print(" ");
}
Iterator<Item> items = buildFromCommunity(context, subCommunity, indent + 1);
result = addItemsToResult(result, items);
}

return result;
}

private Iterator<Item> addItemsToResult(Iterator<Item> result, Iterator<Item> items) {
if (result == null) {
result = items;
} else {
result = Iterators.concat(result, items);
}

return result;
}

/**
* Run the export
*
* @return the exported CSV lines
*/
public DSpaceCSV export() {
try {
Context.Mode originalMode = context.getCurrentMode();
context.setMode(Context.Mode.READ_ONLY);

// Process each item
DSpaceCSV csv = new DSpaceCSV(exportAll);
while (toExport.hasNext()) {
Item item = toExport.next();
csv.addItem(item);
context.uncacheEntity(item);
}

context.setMode(originalMode);
// Return the results
return csv;
} catch (Exception e) {
// Something went wrong...
System.err.println("Error exporting to CSV:");
e.printStackTrace();
return null;
}
}

/**
* Print the help message
*
* @param options The command line options the user gave
* @param exitCode the system exit code to use
*/
private static void printHelp(Options options, int exitCode) {
// print the help message
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("MetadataExport\n", options);
System.out.println("\nfull export: metadataexport -f filename");
System.out.println("partial export: metadataexport -i handle -f filename");
System.exit(exitCode);
}

/**
* main method to run the metadata exporter
*
* @param argv the command line arguments given
* @throws Exception if error occurs
*/
public static void main(String[] argv) throws Exception {
// Create an options object and populate it
CommandLineParser parser = new PosixParser();

Options options = new Options();

options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)");
options.addOption("f", "file", true, "destination where you want file written");
options.addOption("a", "all", false,
"include all metadata fields that are not normally changed (e.g. provenance)");
options.addOption("h", "help", false, "help");

CommandLine line = null;

try {
line = parser.parse(options, argv);
} catch (ParseException pe) {
System.err.println("Error with commands.");
printHelp(options, 1);
System.exit(0);
}

if (line.hasOption('h')) {
printHelp(options, 0);
}

// Check a filename is given
if (!line.hasOption('f')) {
System.err.println("Required parameter -f missing!");
printHelp(options, 1);
}
String filename = line.getOptionValue('f');

// Create a context
Context c = new Context(Context.Mode.READ_ONLY);
c.turnOffAuthorisationSystem();

// The things we'll export
Iterator<Item> toExport = null;
MetadataExport exporter = null;

// Export everything?
boolean exportAll = line.hasOption('a');

ContentServiceFactory contentServiceFactory = ContentServiceFactory.getInstance();
// Check we have an item OK
ItemService itemService = contentServiceFactory.getItemService();
if (!line.hasOption('i')) {
System.out.println("Exporting whole repository WARNING: May take some time!");
exporter = new MetadataExport(c, itemService.findAll(c), exportAll);
} else {
String handle = line.getOptionValue('i');
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(c, handle);
if (dso == null) {
throw new ParseException("An identifier was given that wasn't able to be parsed to a DSpaceObject");
System.err.println("Item '" + handle + "' does not resolve to an item in your repository!");
printHelp(options, 1);
}

if (dso.getType() == Constants.ITEM) {
System.out.println("Exporting item '" + dso.getName() + "' (" + handle + ")");
List<Item> item = new ArrayList<>();
item.add((Item) dso);
exporter = new MetadataExport(c, item.iterator(), exportAll);
} else if (dso.getType() == Constants.COLLECTION) {
System.out.println("Exporting collection '" + dso.getName() + "' (" + handle + ")");
Collection collection = (Collection) dso;
toExport = itemService.findByCollection(c, collection);
exporter = new MetadataExport(c, toExport, exportAll);
} else if (dso.getType() == Constants.COMMUNITY) {
System.out.println("Exporting community '" + dso.getName() + "' (" + handle + ")");
exporter = new MetadataExport(c, (Community) dso, exportAll);
} else {
System.err.println("Error identifying '" + handle + "'");
System.exit(1);
}
return dso.getID().toString() + ".csv";
} catch (SQLException e) {
handler.handleException("Something went wrong trying to retrieve DSO for identifier: " + identifier, e);
}
return null;

// Perform the export
DSpaceCSV csv = exporter.export();

// Save the files to the file
csv.save(filename);

// Finish off and tidy up
c.restoreAuthSystemState();
c.complete();
}
}
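Pulling the rest-tutor side of this hunk together, the legacy command-line flow reduces to three calls: build an exporter over an item iterator, run it, write the result. A condensed sketch using only constructors and methods shown above, assuming an open read-only Context c and an ItemService; the filename is illustrative:

    // Export every item in the repository and write the CSV to disk
    MetadataExport exporter = new MetadataExport(c, itemService.findAll(c), false);
    DSpaceCSV csv = exporter.export();
    csv.save("all-items.csv");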
@@ -1,33 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;

import org.apache.commons.cli.ParseException;

public class MetadataExportCli extends MetadataExport {

@Override
protected String getFileNameForExportFile() {
return commandLine.getOptionValue('f');
}

@Override
public void setup() throws ParseException {
super.setup();
// Check a filename is given
if (!commandLine.hasOption('f')) {
throw new ParseException("Required parameter -f missing!");
}
}

@Override
protected void logHelpInfo() {
handler.logInfo("\nfull export: metadata-export -f filename");
handler.logInfo("partial export: metadata-export -i handle -f filename");
}
}

@@ -1,26 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;

import java.io.OutputStream;

import org.apache.commons.cli.Options;

public class MetadataExportCliScriptConfiguration extends MetadataExportScriptConfiguration<MetadataExportCli> {

@Override
public Options getOptions() {
Options options = super.getOptions();
options.addOption("f", "file", true, "destination where you want file written");
options.getOption("f").setType(OutputStream .class);
options.getOption("f").setRequired(true);
super.options = options;
return options;
}
}

@@ -1,67 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;

import java.sql.SQLException;

import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
* The {@link ScriptConfiguration} for the {@link MetadataExport} script
*/
public class MetadataExportScriptConfiguration<T extends MetadataExport> extends ScriptConfiguration<T> {

@Autowired
private AuthorizeService authorizeService;

private Class<T> dspaceRunnableClass;

@Override
public Class<T> getDspaceRunnableClass() {
return dspaceRunnableClass;
}

/**
* Generic setter for the dspaceRunnableClass
* @param dspaceRunnableClass The dspaceRunnableClass to be set on this MetadataExportScriptConfiguration
*/
@Override
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
this.dspaceRunnableClass = dspaceRunnableClass;
}

@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}

@Override
public Options getOptions() {
if (options == null) {
Options options = new Options();

options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)");
options.addOption("a", "all", false,
"include all metadata fields that are not normally changed (e.g. provenance)");
options.addOption("h", "help", false, "help");

super.options = options;
}
return options;
}

}
@@ -1,170 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/

package org.dspace.app.bulkedit;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;

import org.apache.commons.cli.ParseException;
import org.dspace.content.Item;
import org.dspace.content.MetadataDSpaceCsvExportServiceImpl;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.MetadataDSpaceCsvExportService;
import org.dspace.core.Context;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationService;
import org.dspace.discovery.indexobject.IndexableCollection;
import org.dspace.discovery.indexobject.IndexableCommunity;
import org.dspace.discovery.utils.DiscoverQueryBuilder;
import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.sort.SortOption;
import org.dspace.utils.DSpace;

/**
* Metadata exporter to allow the batch export of metadata from a discovery search into a file
*
*/
public class MetadataExportSearch extends DSpaceRunnable<MetadataExportSearchScriptConfiguration> {
private static final String EXPORT_CSV = "exportCSV";
private boolean help = false;
private String identifier;
private String discoveryConfigName;
private String[] filterQueryStrings;
private boolean hasScope = false;
private String query;

private SearchService searchService;
private MetadataDSpaceCsvExportService metadataDSpaceCsvExportService;
private EPersonService ePersonService;
private DiscoveryConfigurationService discoveryConfigurationService;
private CommunityService communityService;
private CollectionService collectionService;
private DiscoverQueryBuilder queryBuilder;

@Override
public MetadataExportSearchScriptConfiguration getScriptConfiguration() {
return new DSpace().getServiceManager()
.getServiceByName("metadata-export-search", MetadataExportSearchScriptConfiguration.class);
}

@Override
public void setup() throws ParseException {
searchService = SearchUtils.getSearchService();
metadataDSpaceCsvExportService = new DSpace().getServiceManager()
.getServiceByName(
MetadataDSpaceCsvExportServiceImpl.class.getCanonicalName(),
MetadataDSpaceCsvExportService.class
);
ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
discoveryConfigurationService = SearchUtils.getConfigurationService();
communityService = ContentServiceFactory.getInstance().getCommunityService();
collectionService = ContentServiceFactory.getInstance().getCollectionService();
queryBuilder = SearchUtils.getQueryBuilder();

if (commandLine.hasOption('h')) {
help = true;
return;
}

if (commandLine.hasOption('q')) {
query = commandLine.getOptionValue('q');
}

if (commandLine.hasOption('s')) {
hasScope = true;
identifier = commandLine.getOptionValue('s');
}

if (commandLine.hasOption('c')) {
discoveryConfigName = commandLine.getOptionValue('c');
}

if (commandLine.hasOption('f')) {
filterQueryStrings = commandLine.getOptionValues('f');
}
}

@Override
public void internalRun() throws Exception {
if (help) {
loghelpinfo();
printHelp();
return;
}
handler.logDebug("starting search export");

IndexableObject dso = null;
Context context = new Context();
context.setCurrentUser(ePersonService.find(context, this.getEpersonIdentifier()));

if (hasScope) {
dso = resolveScope(context, identifier);
}

DiscoveryConfiguration discoveryConfiguration =
discoveryConfigurationService.getDiscoveryConfiguration(discoveryConfigName);

List<QueryBuilderSearchFilter> queryBuilderSearchFilters = new ArrayList<>();

handler.logDebug("processing filter queries");
if (filterQueryStrings != null) {
for (String filterQueryString: filterQueryStrings) {
String field = filterQueryString.split(",", 2)[0];
String operator = filterQueryString.split("(,|=)", 3)[1];
String value = filterQueryString.split("=", 2)[1];
QueryBuilderSearchFilter queryBuilderSearchFilter =
new QueryBuilderSearchFilter(field, operator, value);
queryBuilderSearchFilters.add(queryBuilderSearchFilter);
}
}
handler.logDebug("building query");
DiscoverQuery discoverQuery =
queryBuilder.buildQuery(context, dso, discoveryConfiguration, query, queryBuilderSearchFilters,
"Item", 10, Long.getLong("0"), null, SortOption.DESCENDING);
handler.logDebug("creating iterator");

Iterator<Item> itemIterator = searchService.iteratorSearch(context, dso, discoverQuery);
handler.logDebug("creating dspacecsv");
DSpaceCSV dSpaceCSV = metadataDSpaceCsvExportService.export(context, itemIterator, true);
handler.logDebug("writing to file " + getFileNameOrExportFile());
handler.writeFilestream(context, getFileNameOrExportFile(), dSpaceCSV.getInputStream(), EXPORT_CSV);
context.restoreAuthSystemState();
context.complete();

}

protected void loghelpinfo() {
handler.logInfo("metadata-export");
}

protected String getFileNameOrExportFile() {
return "metadataExportSearch.csv";
}

public IndexableObject resolveScope(Context context, String id) throws SQLException {
UUID uuid = UUID.fromString(id);
IndexableObject scopeObj = new IndexableCommunity(communityService.find(context, uuid));
if (scopeObj.getIndexedObject() == null) {
scopeObj = new IndexableCollection(collectionService.find(context, uuid));
}
return scopeObj;
}
}
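The three split() calls in internalRun() decode a -f argument of the form <:filter-name>,<:filter-operator>=<:filter-value>, as documented in the option help further below. Tracing the documented example through them, with the intermediate results shown as comments:

    // Input: "title,contains=sample text"
    String fq = "title,contains=sample text";
    String field = fq.split(",", 2)[0];        // "title"
    String operator = fq.split("(,|=)", 3)[1]; // "contains"
    String value = fq.split("=", 2)[1];        // "sample text"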
@@ -1,20 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/

package org.dspace.app.bulkedit;

/**
* The cli version of the {@link MetadataExportSearch} script
*/
public class MetadataExportSearchCli extends MetadataExportSearch {

@Override
protected String getFileNameOrExportFile() {
return commandLine.getOptionValue('n');
}
}

@@ -1,26 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/

package org.dspace.app.bulkedit;

import org.apache.commons.cli.Options;

/**
* This is the CLI version of the {@link MetadataExportSearchScriptConfiguration} class that handles the
* configuration for the {@link MetadataExportSearchCli} script
*/
public class MetadataExportSearchCliScriptConfiguration
extends MetadataExportSearchScriptConfiguration<MetadataExportSearchCli> {

@Override
public Options getOptions() {
Options options = super.getOptions();
options.addOption("n", "filename", true, "the filename to export to");
return super.getOptions();
}
}

@@ -1,62 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/

package org.dspace.app.bulkedit;

import org.apache.commons.cli.Options;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;

/**
* The {@link ScriptConfiguration} for the {@link MetadataExportSearch} script
*/
public class MetadataExportSearchScriptConfiguration<T extends MetadataExportSearch> extends ScriptConfiguration<T> {

private Class<T> dspaceRunnableclass;

@Override
public Class<T> getDspaceRunnableClass() {
return dspaceRunnableclass;
}

@Override
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
this.dspaceRunnableclass = dspaceRunnableClass;
}

@Override
public boolean isAllowedToExecute(Context context) {
return true;
}

@Override
public Options getOptions() {
if (options == null) {
Options options = new Options();
options.addOption("q", "query", true,
"The discovery search string to will be used to match records. Not URL encoded");
options.getOption("q").setType(String.class);
options.addOption("s", "scope", true,
"UUID of a specific DSpace container (site, community or collection) to which the search has to be " +
"limited");
options.getOption("s").setType(String.class);
options.addOption("c", "configuration", true,
"The name of a Discovery configuration that should be used by this search");
options.getOption("c").setType(String.class);
options.addOption("f", "filter", true,
"Advanced search filter that has to be used to filter the result set, with syntax `<:filter-name>," +
"<:filter-operator>=<:filter-value>`. Not URL encoded. For example `author," +
"authority=5df05073-3be7-410d-8166-e254369e4166` or `title,contains=sample text`");
options.getOption("f").setType(String.class);
options.addOption("h", "help", false, "help");

super.options = options;
}
return options;
}
}
File diff suppressed because it is too large
@@ -1,68 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.UUID;

import org.apache.commons.cli.ParseException;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.scripts.handler.DSpaceRunnableHandler;

/**
* CLI variant for the {@link MetadataImport} class
* This has been made so that we can specify the behaviour of the determineChanges method to be specific for the CLI
*/
public class MetadataImportCLI extends MetadataImport {

@Override
protected boolean determineChange(DSpaceRunnableHandler handler) throws IOException {
handler.logInfo("Do you want to make these changes? [y/n] ");
try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(System.in))) {
String yn = bufferedReader.readLine();
if ("y".equalsIgnoreCase(yn)) {
return true;
}
return false;
}
}

@Override
protected void assignCurrentUserInContext(Context context) throws ParseException {
try {
if (commandLine.hasOption('e')) {
EPerson eperson;
String e = commandLine.getOptionValue('e');
if (e.indexOf('@') != -1) {
eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, e);
} else {
eperson = EPersonServiceFactory.getInstance().getEPersonService().find(context, UUID.fromString(e));
}

if (eperson == null) {
throw new ParseException("Error, eperson cannot be found: " + e);
}
context.setCurrentUser(eperson);
}
} catch (Exception e) {
throw new ParseException("Unable to find DSpace user: " + e.getMessage());
}
}

@Override
public void setup() throws ParseException {
super.setup();
if (!commandLine.hasOption('e')) {
throw new ParseException("Required parameter -e missing!");
}
}
}
@@ -1,26 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;

import org.apache.commons.cli.Options;
import org.dspace.scripts.configuration.ScriptConfiguration;

/**
* The {@link ScriptConfiguration} for the {@link org.dspace.app.bulkedit.MetadataImportCLI} CLI script
*/
public class MetadataImportCliScriptConfiguration extends MetadataImportScriptConfiguration<MetadataImportCLI> {

@Override
public Options getOptions() {
Options options = super.getOptions();
options.addOption("e", "email", true, "email address or user id of user (required if adding new items)");
options.getOption("e").setRequired(true);
super.options = options;
return options;
}
}

@@ -1,75 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;

import java.io.InputStream;
import java.sql.SQLException;

import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
* The {@link ScriptConfiguration} for the {@link MetadataImport} script
*/
public class MetadataImportScriptConfiguration<T extends MetadataImport> extends ScriptConfiguration<T> {

@Autowired
private AuthorizeService authorizeService;

private Class<T> dspaceRunnableClass;

@Override
public Class<T> getDspaceRunnableClass() {
return dspaceRunnableClass;
}

/**
* Generic setter for the dspaceRunnableClass
* @param dspaceRunnableClass The dspaceRunnableClass to be set on this MetadataImportScriptConfiguration
*/
@Override
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
this.dspaceRunnableClass = dspaceRunnableClass;
}

@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}

@Override
public Options getOptions() {
if (options == null) {
Options options = new Options();

options.addOption("f", "file", true, "source file");
options.getOption("f").setType(InputStream.class);
options.getOption("f").setRequired(true);
options.addOption("s", "silent", false,
"silent operation - doesn't request confirmation of changes USE WITH CAUTION");
options.addOption("w", "workflow", false, "workflow - when adding new items, use collection workflow");
options.addOption("n", "notify", false,
"notify - when adding new items using a workflow, send notification emails");
options.addOption("v", "validate-only", false,
"validate - just validate the csv, don't run the import");
options.addOption("t", "template", false,
"template - when adding new items, use the collection template (if it exists)");
options.addOption("h", "help", false, "help");

super.options = options;
}
return options;
}
}
@@ -17,13 +17,13 @@ import java.util.UUID;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.commons.cli.PosixParser;
import org.apache.log4j.Logger;
import org.dspace.checker.BitstreamDispatcher;
import org.dspace.checker.CheckerCommand;
import org.dspace.checker.HandleDispatcher;
@@ -48,7 +48,7 @@ import org.dspace.core.Utils;
 * @author Nathan Sarr
 */
public final class ChecksumChecker {
    private static final Logger LOG = LogManager.getLogger(ChecksumChecker.class);
    private static final Logger LOG = Logger.getLogger(ChecksumChecker.class);

    private static final BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();

@@ -86,7 +86,7 @@ public final class ChecksumChecker {
     */
    public static void main(String[] args) throws SQLException {
        // set up command line parser
        CommandLineParser parser = new DefaultParser();
        CommandLineParser parser = new PosixParser();
        CommandLine line = null;

        // create an options object and populate it
@@ -101,21 +101,19 @@ public final class ChecksumChecker {
        options.addOption("a", "handle", true, "Specify a handle to check");
        options.addOption("v", "verbose", false, "Report all processing");

        Option option;
        OptionBuilder.withArgName("bitstream-ids").hasArgs().withDescription(
            "Space separated list of bitstream ids");
        Option useBitstreamIds = OptionBuilder.create('b');

        option = Option.builder("b")
            .longOpt("bitstream-ids")
            .hasArgs()
            .desc("Space separated list of bitstream ids")
            .build();
        options.addOption(option);
        options.addOption(useBitstreamIds);

        option = Option.builder("p")
            .longOpt("prune")
            .optionalArg(true)
            .desc("Prune old results (optionally using specified properties file for configuration)")
            .build();
        options.addOption(option);
        options.addOption("p", "prune", false, "Prune configuration file");
        options.addOption(OptionBuilder
                              .withArgName("prune")
                              .hasOptionalArgs(1)
                              .withDescription(
                                  "Prune old results (optionally using specified properties file for configuration)")
                              .create('p'));

        try {
            line = parser.parse(options, args);
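The ChecksumChecker hunks above are part of the move from commons-cli's deprecated static OptionBuilder (paired with PosixParser) to the fluent Option.builder API (paired with DefaultParser) available since commons-cli 1.3. A small self-contained sketch of the newer style, reusing the "-b" option from the diff; the wrapper class is illustrative:

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.DefaultParser;
    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.Options;

    public class BuilderStyleSketch {
        public static void main(String[] args) throws Exception {
            Options options = new Options();
            // The fluent builder replaces OptionBuilder's shared static state,
            // which made the old API error-prone to reuse.
            options.addOption(Option.builder("b")
                                    .longOpt("bitstream-ids")
                                    .hasArgs() // accepts one or more values
                                    .desc("Space separated list of bitstream ids")
                                    .build());
            CommandLine line = new DefaultParser().parse(options, args);
            if (line.hasOption('b')) {
                for (String id : line.getOptionValues('b')) {
                    System.out.println("bitstream id: " + id);
                }
            }
        }
    }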
@@ -1,32 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.exception;

/**
 * This class provides an exception to be used when trying to save a resource
 * that already exists.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class ResourceAlreadyExistsException extends RuntimeException {

    private static final long serialVersionUID = 1L;

    /**
     * Create a ResourceAlreadyExistsException with a message describing the
     * already existing resource.
     *
     * @param message the error message
     */
    public ResourceAlreadyExistsException(String message) {
        super(message);
    }

}
@@ -13,8 +13,11 @@ import java.util.Iterator;
import java.util.List;
import java.util.UUID;

import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
@@ -33,223 +36,224 @@ import org.dspace.harvest.HarvestingException;
import org.dspace.harvest.OAIHarvester;
import org.dspace.harvest.factory.HarvestServiceFactory;
import org.dspace.harvest.service.HarvestedCollectionService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.utils.DSpace;

/**
 * Test class for harvested collections.
 *
 * @author Alexey Maslov
 */
public class Harvest extends DSpaceRunnable<HarvestScriptConfiguration> {
public class Harvest {
    private static Context context;

    private HarvestedCollectionService harvestedCollectionService;
    protected EPersonService ePersonService;
    private CollectionService collectionService;
    private static final HarvestedCollectionService harvestedCollectionService =
        HarvestServiceFactory.getInstance().getHarvestedCollectionService();
    private static final EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
    private static final CollectionService collectionService =
        ContentServiceFactory.getInstance().getCollectionService();

    private boolean help;
    private String command = null;
    private String collection = null;
    private String oaiSource = null;
    private String oaiSetID = null;
    private String metadataKey = null;
    private int harvestType = 0;
    public static void main(String[] argv) throws Exception {
        // create an options object and populate it
        CommandLineParser parser = new PosixParser();

    protected Context context;
        Options options = new Options();

        options.addOption("p", "purge", false, "delete all items in the collection");
        options.addOption("r", "run", false, "run the standard harvest procedure");
        options.addOption("g", "ping", false, "test the OAI server and set");
        options.addOption("o", "once", false, "run the harvest procedure with specified parameters");
        options.addOption("s", "setup", false, "Set the collection up for harvesting");
        options.addOption("S", "start", false, "start the harvest loop");
        options.addOption("R", "reset", false, "reset harvest status on all collections");
        options.addOption("P", "purge", false, "purge all harvestable collections");


    public HarvestScriptConfiguration getScriptConfiguration() {
        return new DSpace().getServiceManager()
                           .getServiceByName("harvest", HarvestScriptConfiguration.class);
    }
        options.addOption("e", "eperson", true,
                          "eperson");
        options.addOption("c", "collection", true,
                          "harvesting collection (handle or id)");
        options.addOption("t", "type", true,
                          "type of harvesting (0 for none)");
        options.addOption("a", "address", true,
                          "address of the OAI-PMH server");
        options.addOption("i", "oai_set_id", true,
                          "id of the PMH set representing the harvested collection");
        options.addOption("m", "metadata_format", true,
                          "the name of the desired metadata format for harvesting, resolved to namespace and " +
                          "crosswalk in dspace.cfg");

    public void setup() throws ParseException {
        harvestedCollectionService =
            HarvestServiceFactory.getInstance().getHarvestedCollectionService();
        ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
        collectionService =
            ContentServiceFactory.getInstance().getCollectionService();
        options.addOption("h", "help", false, "help");

        assignCurrentUserInContext();
        CommandLine line = parser.parse(options, argv);

        help = commandLine.hasOption('h');
        String command = null;
        String eperson = null;
        String collection = null;
        String oaiSource = null;
        String oaiSetID = null;
        String metadataKey = null;
        int harvestType = 0;

        if (line.hasOption('h')) {
            HelpFormatter myhelp = new HelpFormatter();
            myhelp.printHelp("Harvest\n", options);
            System.out.println("\nPING OAI server: Harvest -g -a oai_source -i oai_set_id");
            System.out.println(
                "RUNONCE harvest with arbitrary options: Harvest -o -e eperson -c collection -t harvest_type -a " +
                "oai_source -i oai_set_id -m metadata_format");
            System.out.println(
                "SETUP a collection for harvesting: Harvest -s -c collection -t harvest_type -a oai_source -i " +
                "oai_set_id -m metadata_format");
            System.out.println("RUN harvest once: Harvest -r -e eperson -c collection");
            System.out.println("START harvest scheduler: Harvest -S");
            System.out.println("RESET all harvest status: Harvest -R");
            System.out.println("PURGE a collection of items and settings: Harvest -p -e eperson -c collection");
            System.out.println("PURGE all harvestable collections: Harvest -P -e eperson");


        if (commandLine.hasOption('s')) {
            System.exit(0);
        }

        if (line.hasOption('s')) {
            command = "config";
        }
        if (commandLine.hasOption('p')) {
        if (line.hasOption('p')) {
            command = "purge";
        }
        if (commandLine.hasOption('r')) {
        if (line.hasOption('r')) {
            command = "run";
        }
        if (commandLine.hasOption('g')) {
        if (line.hasOption('g')) {
            command = "ping";
        }
        if (commandLine.hasOption('S')) {
        if (line.hasOption('o')) {
            command = "runOnce";
        }
        if (line.hasOption('S')) {
            command = "start";
        }
        if (commandLine.hasOption('R')) {
        if (line.hasOption('R')) {
            command = "reset";
        }
        if (commandLine.hasOption('P')) {
        if (line.hasOption('P')) {
            command = "purgeAll";
        }
        if (commandLine.hasOption('o')) {
            command = "reimport";


        if (line.hasOption('e')) {
            eperson = line.getOptionValue('e');
        }
        if (commandLine.hasOption('c')) {
            collection = commandLine.getOptionValue('c');
        if (line.hasOption('c')) {
            collection = line.getOptionValue('c');
        }
        if (commandLine.hasOption('t')) {
            harvestType = Integer.parseInt(commandLine.getOptionValue('t'));
        if (line.hasOption('t')) {
            harvestType = Integer.parseInt(line.getOptionValue('t'));
        } else {
            harvestType = 0;
        }
        if (commandLine.hasOption('a')) {
            oaiSource = commandLine.getOptionValue('a');
        if (line.hasOption('a')) {
            oaiSource = line.getOptionValue('a');
        }
        if (commandLine.hasOption('i')) {
            oaiSetID = commandLine.getOptionValue('i');
        if (line.hasOption('i')) {
            oaiSetID = line.getOptionValue('i');
        }
        if (commandLine.hasOption('m')) {
            metadataKey = commandLine.getOptionValue('m');
        }
    }

    /**
     * This method will assign the currentUser to the {@link Context} variable which is also created in this method.
     * The instance of the method in this class will fetch the EPersonIdentifier from this class, this identifier
     * was given to this class upon instantiation, it'll then be used to find the {@link EPerson} associated with it
     * and this {@link EPerson} will be set as the currentUser of the created {@link Context}
     * @throws ParseException If something went wrong with the retrieval of the EPerson Identifier
     */
    protected void assignCurrentUserInContext() throws ParseException {
        UUID currentUserUuid = this.getEpersonIdentifier();
        try {
            this.context = new Context(Context.Mode.BATCH_EDIT);
            EPerson eperson = ePersonService.find(context, currentUserUuid);
            if (eperson == null) {
                super.handler.logError("EPerson not found: " + currentUserUuid);
                throw new IllegalArgumentException("Unable to find a user with uuid: " + currentUserUuid);
            }
            this.context.setCurrentUser(eperson);
        } catch (SQLException e) {
            handler.handleException("Something went wrong trying to fetch eperson for uuid: " + currentUserUuid, e);
        }
    }

    public void internalRun() throws Exception {
        if (help) {
            printHelp();
            handler.logInfo("PING OAI server: Harvest -g -a oai_source -i oai_set_id");
            handler.logInfo(
                "SETUP a collection for harvesting: Harvest -s -c collection -t harvest_type -a oai_source -i " +
                "oai_set_id -m metadata_format");
            handler.logInfo("RUN harvest once: Harvest -r -e eperson -c collection");
            handler.logInfo("START harvest scheduler: Harvest -S");
            handler.logInfo("RESET all harvest status: Harvest -R");
            handler.logInfo("PURGE a collection of items and settings: Harvest -p -e eperson -c collection");
            handler.logInfo("PURGE all harvestable collections: Harvest -P -e eperson");

            return;
        if (line.hasOption('m')) {
            metadataKey = line.getOptionValue('m');
        }

        if (StringUtils.isBlank(command)) {
            handler.logError("No parameters specified (run with -h flag for details)");
            throw new UnsupportedOperationException("No command specified");

        // Instantiate our class
        Harvest harvester = new Harvest();
        harvester.context = new Context(Context.Mode.BATCH_EDIT);


        // Check our options
        if (command == null) {
            System.out
                .println("Error - no parameters specified (run with -h flag for details)");
            System.exit(1);
        } else if ("run".equals(command)) {
            // Run a single harvest cycle on a collection using saved settings.
            if (collection == null || context.getCurrentUser() == null) {
                handler.logError("A target collection and eperson must be provided (run with -h flag for details)");
                throw new UnsupportedOperationException("A target collection and eperson must be provided");
            if (collection == null || eperson == null) {
                System.out
                    .println("Error - a target collection and eperson must be provided");
                System.out.println(" (run with -h flag for details)");
                System.exit(1);
            }
            runHarvest(context, collection);

            harvester.runHarvest(collection, eperson);
        } else if ("start".equals(command)) {
            // start the harvest loop
            startHarvester();
        } else if ("reset".equals(command)) {
            // reset harvesting status
            resetHarvesting(context);
            resetHarvesting();
        } else if ("purgeAll".equals(command)) {
            // purge all collections that are set up for harvesting (obviously for testing purposes only)
            if (context.getCurrentUser() == null) {
                handler.logError("An eperson must be provided (run with -h flag for details)");
                throw new UnsupportedOperationException("An eperson must be provided");
            if (eperson == null) {
                System.out
                    .println("Error - an eperson must be provided");
                System.out.println(" (run with -h flag for details)");
                System.exit(1);
            }

            List<HarvestedCollection> harvestedCollections = harvestedCollectionService.findAll(context);
            for (HarvestedCollection harvestedCollection : harvestedCollections) {
                handler.logInfo(
                    "Purging the following collections (deleting items and resetting harvest status): " +
                        harvestedCollection
                            .getCollection().getID().toString());
                purgeCollection(context, harvestedCollection.getCollection().getID().toString());
                System.out.println(
                    "Purging the following collections (deleting items and resetting harvest status): " +
                        harvestedCollection
                            .getCollection().getID().toString());
                harvester.purgeCollection(harvestedCollection.getCollection().getID().toString(), eperson);
            }
            context.complete();
        } else if ("purge".equals(command)) {
            // Delete all items in a collection. Useful for testing fresh harvests.
            if (collection == null || context.getCurrentUser() == null) {
                handler.logError("A target collection and eperson must be provided (run with -h flag for details)");
                throw new UnsupportedOperationException("A target collection and eperson must be provided");
            if (collection == null || eperson == null) {
                System.out
                    .println("Error - a target collection and eperson must be provided");
                System.out.println(" (run with -h flag for details)");
                System.exit(1);
            }

            purgeCollection(context, collection);
            context.complete();

        } else if ("reimport".equals(command)) {
            // Delete all items in a collection. Useful for testing fresh harvests.
            if (collection == null || context.getCurrentUser() == null) {
                handler.logError("A target collection and eperson must be provided (run with -h flag for details)");
                throw new UnsupportedOperationException("A target collection and eperson must be provided");
            }
            purgeCollection(context, collection);
            runHarvest(context, collection);
            harvester.purgeCollection(collection, eperson);
            context.complete();

            //TODO: implement this... remove all items and remember to unset "last-harvested" settings
        } else if ("config".equals(command)) {
            // Configure a collection with the three main settings
            if (collection == null) {
                handler.logError("A target collection must be provided (run with -h flag for details)");
                throw new UnsupportedOperationException("A target collection must be provided");
                System.out.println("Error - a target collection must be provided");
                System.out.println(" (run with -h flag for details)");
                System.exit(1);
            }
            if (oaiSource == null || oaiSetID == null) {
                handler.logError(
                    "Both the OAI server address and OAI set id must be specified (run with -h flag for details)");
                throw new UnsupportedOperationException("Both the OAI server address and OAI set id must be specified");
                System.out.println("Error - both the OAI server address and OAI set id must be specified");
                System.out.println(" (run with -h flag for details)");
                System.exit(1);
            }
            if (metadataKey == null) {
                handler.logError(
                    "A metadata key (commonly the prefix) must be specified for this collection (run with -h flag" +
                        " for details)");
                throw new UnsupportedOperationException(
                    "A metadata key (commonly the prefix) must be specified for this collection");
                System.out
                    .println("Error - a metadata key (commonly the prefix) must be specified for this collection");
                System.out.println(" (run with -h flag for details)");
                System.exit(1);
            }

            configureCollection(context, collection, harvestType, oaiSource, oaiSetID, metadataKey);
            harvester.configureCollection(collection, harvestType, oaiSource, oaiSetID, metadataKey);
        } else if ("ping".equals(command)) {
            if (oaiSource == null || oaiSetID == null) {
                handler.logError(
                    "Both the OAI server address and OAI set id must be specified (run with -h flag for details)");
                throw new UnsupportedOperationException("Both the OAI server address and OAI set id must be specified");
                System.out.println("Error - both the OAI server address and OAI set id must be specified");
                System.out.println(" (run with -h flag for details)");
                System.exit(1);
            }

            pingResponder(oaiSource, oaiSetID, metadataKey);
        } else {
            handler.logError(
                "Your command '" + command + "' was not recognized properly (run with -h flag for details)");
            throw new UnsupportedOperationException("Your command '" + command + "' was not recognized properly");
        }


    }

    /*
     * Resolve the ID into a collection and check to see if its harvesting options are set. If so, return
     * the collection, if not, bail out.
     */
    private Collection resolveCollection(Context context, String collectionID) {
    private Collection resolveCollection(String collectionID) {

        DSpaceObject dso;
        Collection targetCollection = null;
@@ -268,15 +272,16 @@ public class Harvest extends DSpaceRunnable<HarvestScriptConfiguration> {
                    targetCollection = (Collection) dso;
                }
            } else {
                // not a handle, try and treat it as a collection database UUID
                handler.logInfo("Looking up by UUID: " + collectionID + ", " + "in context: " + context);
                // not a handle, try and treat it as an integer collection database ID
                System.out.println("Looking up by id: " + collectionID + ", parsed as '" + Integer
                    .parseInt(collectionID) + "', " + "in context: " + context);
                targetCollection = collectionService.find(context, UUID.fromString(collectionID));
            }
        }
        // was the collection valid?
        if (targetCollection == null) {
            handler.logError("Cannot resolve " + collectionID + " to collection");
            throw new UnsupportedOperationException("Cannot resolve " + collectionID + " to collection");
            System.out.println("Cannot resolve " + collectionID + " to collection");
            System.exit(1);
        }
    } catch (SQLException se) {
        se.printStackTrace();
@@ -286,12 +291,12 @@ public class Harvest extends DSpaceRunnable<HarvestScriptConfiguration> {
    }


    private void configureCollection(Context context, String collectionID, int type, String oaiSource, String oaiSetId,
    private void configureCollection(String collectionID, int type, String oaiSource, String oaiSetId,
                                     String mdConfigId) {
        handler.logInfo("Running: configure collection");
        System.out.println("Running: configure collection");

        Collection collection = resolveCollection(context, collectionID);
        handler.logInfo(String.valueOf(collection.getID()));
        Collection collection = resolveCollection(collectionID);
        System.out.println(collection.getID());

        try {
            HarvestedCollection hc = harvestedCollectionService.find(context, collection);
@@ -318,15 +324,18 @@ public class Harvest extends DSpaceRunnable<HarvestScriptConfiguration> {

    /**
     * Purges a collection of all harvest-related data and settings. All items in the collection will be deleted.
     * @param collectionID
     *
     * @param collectionID
     * @param email
     */
    private void purgeCollection(Context context, String collectionID) {
        handler.logInfo(
            "Purging collection of all items and resetting last_harvested and harvest_message: " + collectionID);
        Collection collection = resolveCollection(context, collectionID);
    private void purgeCollection(String collectionID, String email) {
        System.out.println(
            "Purging collection of all items and resetting last_harvested and harvest_message: " + collectionID);
        Collection collection = resolveCollection(collectionID);

        try {
            EPerson eperson = ePersonService.findByEmail(context, email);
            context.setCurrentUser(eperson);
            context.turnOffAuthorisationSystem();

            ItemService itemService = ContentServiceFactory.getInstance().getItemService();
@@ -335,7 +344,7 @@ public class Harvest extends DSpaceRunnable<HarvestScriptConfiguration> {
            while (it.hasNext()) {
                i++;
                Item item = it.next();
                handler.logInfo("Deleting: " + item.getHandle());
                System.out.println("Deleting: " + item.getHandle());
                collectionService.removeItem(context, collection, item);
                context.uncacheEntity(item); // Dispatch events every 50 items
                if (i % 50 == 0) {
@@ -355,8 +364,9 @@ public class Harvest extends DSpaceRunnable<HarvestScriptConfiguration> {
            context.restoreAuthSystemState();
            context.dispatchEvents();
        } catch (Exception e) {
            handler.logError("Changes could not be committed");
            handler.handleException(e);
            System.out.println("Changes could not be committed");
            e.printStackTrace();
            System.exit(1);
        } finally {
            context.restoreAuthSystemState();
        }
@@ -366,42 +376,50 @@ public class Harvest extends DSpaceRunnable<HarvestScriptConfiguration> {

    /**
     * Run a single harvest cycle on the specified collection under the authorization of the supplied EPerson
     */
    private void runHarvest(Context context, String collectionID) {
        handler.logInfo("Running: a harvest cycle on " + collectionID);
    private void runHarvest(String collectionID, String email) {
        System.out.println("Running: a harvest cycle on " + collectionID);

        handler.logInfo("Initializing the harvester... ");
        System.out.print("Initializing the harvester... ");
        OAIHarvester harvester = null;
        try {
            Collection collection = resolveCollection(context, collectionID);
            Collection collection = resolveCollection(collectionID);
            HarvestedCollection hc = harvestedCollectionService.find(context, collection);
            harvester = new OAIHarvester(context, collection, hc);
            handler.logInfo("Initialized the harvester successfully");
            System.out.println("success. ");
        } catch (HarvestingException hex) {
            handler.logError("Initializing the harvester failed.");
            System.out.print("failed. ");
            System.out.println(hex.getMessage());
            throw new IllegalStateException("Unable to harvest", hex);
        } catch (SQLException se) {
            handler.logError("Initializing the harvester failed.");
            System.out.print("failed. ");
            System.out.println(se.getMessage());
            throw new IllegalStateException("Unable to access database", se);
        }

        try {
            // Harvest will not work for an anonymous user
            handler.logInfo("Harvest started... ");
            EPerson eperson = ePersonService.findByEmail(context, email);
            System.out.println("Harvest started... ");
            context.setCurrentUser(eperson);
            harvester.runHarvest();
            context.complete();
        } catch (SQLException | AuthorizeException | IOException e) {
        } catch (SQLException e) {
            throw new IllegalStateException("Failed to run harvester", e);
        } catch (AuthorizeException e) {
            throw new IllegalStateException("Failed to run harvester", e);
        } catch (IOException e) {
            throw new IllegalStateException("Failed to run harvester", e);
        }

        handler.logInfo("Harvest complete. ");
        System.out.println("Harvest complete. ");
    }

    /**
     * Resets harvest_status and harvest_start_time flags for all collections that have a row in the
     * harvested_collections table
     */
    private void resetHarvesting(Context context) {
        handler.logInfo("Resetting harvest status flag on all collections... ");
    private static void resetHarvesting() {
        System.out.print("Resetting harvest status flag on all collections... ");

        try {
            List<HarvestedCollection> harvestedCollections = harvestedCollectionService.findAll(context);
@@ -411,21 +429,21 @@ public class Harvest extends DSpaceRunnable<HarvestScriptConfiguration> {
                harvestedCollection.setHarvestStatus(HarvestedCollection.STATUS_READY);
                harvestedCollectionService.update(context, harvestedCollection);
            }
            handler.logInfo("Reset harvest status flag successfully");
            System.out.println("success. ");
        } catch (Exception ex) {
            handler.logError("Resetting harvest status flag failed");
            handler.handleException(ex);
            System.out.println("failed. ");
            ex.printStackTrace();
        }
    }

    /**
     * Starts up the harvest scheduler. Terminating this process will stop the scheduler.
     */
    private void startHarvester() {
    private static void startHarvester() {
        try {
            handler.logInfo("Starting harvest loop... ");
            System.out.print("Starting harvest loop... ");
            HarvestServiceFactory.getInstance().getHarvestSchedulingService().startNewScheduler();
            handler.logInfo("running. ");
            System.out.println("running. ");
        } catch (Exception ex) {
            ex.printStackTrace();
        }
@@ -438,31 +456,29 @@ public class Harvest extends DSpaceRunnable<HarvestScriptConfiguration> {
     * @param set name of an item set.
     * @param metadataFormat local prefix name, or null for "dc".
     */
    private void pingResponder(String server, String set, String metadataFormat) {
    private static void pingResponder(String server, String set, String metadataFormat) {
        List<String> errors;

        handler.logInfo("Testing basic PMH access: ");
        errors = harvestedCollectionService.verifyOAIharvester(server, set,
            (null != metadataFormat) ? metadataFormat : "dc", false);
        System.out.print("Testing basic PMH access: ");
        errors = OAIHarvester.verifyOAIharvester(server, set,
            (null != metadataFormat) ? metadataFormat : "dc", false);
        if (errors.isEmpty()) {
            handler.logInfo("OK");
            System.out.println("OK");
        } else {
            for (String error : errors) {
                handler.logError(error);
                System.err.println(error);
            }
        }

        handler.logInfo("Testing ORE support: ");
        errors = harvestedCollectionService.verifyOAIharvester(server, set,
            (null != metadataFormat) ? metadataFormat : "dc", true);
        System.out.print("Testing ORE support: ");
        errors = OAIHarvester.verifyOAIharvester(server, set,
            (null != metadataFormat) ? metadataFormat : "dc", true);
        if (errors.isEmpty()) {
            handler.logInfo("OK");
            System.out.println("OK");
        } else {
            for (String error : errors) {
                handler.logError(error);
                System.err.println(error);
            }
        }
    }


}
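One behavioral difference visible in resolveCollection above: the dspace-7.4 side looks a non-handle ID up as a collection UUID, while the rest-tutor side still parses it as an integer database ID. A minimal sketch of the UUID-style dispatch; the handleService lookup and surrounding fields are assumed to be in scope, as in the class above:

    // Handles contain a slash (e.g. "123456789/2"); anything else is treated
    // as a collection UUID.
    if (collectionID.indexOf('/') != -1) {
        DSpaceObject dso = handleService.resolveToObject(context, collectionID);
        if (dso instanceof Collection) {
            targetCollection = (Collection) dso;
        }
    } else {
        targetCollection = collectionService.find(context, UUID.fromString(collectionID));
    }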
@@ -1,45 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.harvest;

import java.sql.SQLException;

import org.apache.commons.cli.ParseException;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;

public class HarvestCli extends Harvest {

    /**
     * This is the overridden instance of the {@link Harvest#assignCurrentUserInContext()} method in the parent class
     * {@link Harvest}.
     * This is done so that the CLI version of the Script is able to retrieve its currentUser from the -e flag given
     * with the parameters of the Script.
     *
     * @throws ParseException If the e flag was not given to the parameters when calling the script
     */
    @Override
    protected void assignCurrentUserInContext() throws ParseException {
        if (this.commandLine.hasOption('e')) {
            String ePersonEmail = this.commandLine.getOptionValue('e');
            this.context = new Context(Context.Mode.BATCH_EDIT);
            try {
                EPerson ePerson = ePersonService.findByEmail(this.context, ePersonEmail);
                if (ePerson == null) {
                    super.handler.logError("EPerson not found: " + ePersonEmail);
                    throw new IllegalArgumentException("Unable to find a user with email: " + ePersonEmail);
                }
                this.context.setCurrentUser(ePerson);
            } catch (SQLException e) {
                throw new IllegalArgumentException("SQLException trying to find user with email: " + ePersonEmail);
            }
        }
    }


}
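The -e handling above lines up with the invocation patterns listed in the Harvest help text earlier in this diff; for example (the launcher path, eperson, OAI endpoint, and collection handle are placeholders):

    [dspace]/bin/dspace harvest -g -a http://example.org/oai/request -i sample_set
    [dspace]/bin/dspace harvest -r -e admin@example.org -c 123456789/2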
@@ -1,22 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.harvest;

import org.apache.commons.cli.Options;


public class HarvestCliScriptConfiguration extends HarvestScriptConfiguration {

    public Options getOptions() {
        Options options = super.getOptions();
        options.addOption("e", "eperson", true,
                          "eperson");

        return options;
    }
}
@@ -1,70 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.harvest;

import java.sql.SQLException;

import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;


public class HarvestScriptConfiguration<T extends Harvest> extends ScriptConfiguration<T> {
    @Autowired
    private AuthorizeService authorizeService;

    private Class<T> dspaceRunnableClass;

    @Override
    public Class<T> getDspaceRunnableClass() {
        return dspaceRunnableClass;
    }

    @Override
    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
        this.dspaceRunnableClass = dspaceRunnableClass;
    }

    public boolean isAllowedToExecute(final Context context) {
        try {
            return authorizeService.isAdmin(context);
        } catch (SQLException e) {
            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
        }
    }

    public Options getOptions() {
        Options options = new Options();
        options.addOption("p", "purge", false, "delete all items in the collection");
        options.addOption("r", "run", false, "run the standard harvest procedure");
        options.addOption("g", "ping", false, "test the OAI server and set");
        options.addOption("s", "setup", false, "Set the collection up for harvesting");
        options.addOption("S", "start", false, "start the harvest loop");
        options.addOption("R", "reset", false, "reset harvest status on all collections");
        options.addOption("P", "purgeCollections", false, "purge all harvestable collections");
        options.addOption("o", "reimport", false, "reimport all items in the collection, " +
            "this is equivalent to -p -r, purging all items in a collection and reimporting them");
        options.addOption("c", "collection", true,
                          "harvesting collection (handle or id)");
        options.addOption("t", "type", true,
                          "type of harvesting (0 for none)");
        options.addOption("a", "address", true,
                          "address of the OAI-PMH server");
        options.addOption("i", "oai_set_id", true,
                          "id of the PMH set representing the harvested collection");
        options.addOption("m", "metadata_format", true,
                          "the name of the desired metadata format for harvesting, resolved to namespace and " +
                          "crosswalk in dspace.cfg");

        options.addOption("h", "help", false, "help");

        return options;
    }
}
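A configuration like the one above is resolved at runtime through the DSpace ServiceManager, mirroring Harvest#getScriptConfiguration earlier in this diff; a minimal sketch (the "harvest" bean name is taken from that method, and the raw type is used for brevity):

    HarvestScriptConfiguration config = new DSpace().getServiceManager()
            .getServiceByName("harvest", HarvestScriptConfiguration.class);
    Options options = config.getOptions(); // the Options assembled above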
@@ -1,264 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.itemexport;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.nio.file.Path;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;

import org.apache.commons.cli.ParseException;
import org.apache.commons.io.file.PathUtils;
import org.dspace.app.itemexport.factory.ItemExportServiceFactory;
import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.utils.DSpace;

/**
 * Item exporter to create simple AIPs for DSpace content. Currently exports
 * individual items, or entire collections. For instructions on use, see
 * printUsage() method.
 * <P>
 * ItemExport creates the simple AIP package that the importer also uses. It
 * consists of:
 * <P>
 * /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin
 * core in RDF schema / contents - text file, listing one file per line / file1
 * - files contained in the item / file2 / ...
 * <P>
 * issues -doesn't handle special characters in metadata (needs to turn {@code &'s} into
 * {@code &}, etc.)
 * <P>
 * Modified by David Little, UCSD Libraries 12/21/04 to allow the registration
 * of files (bitstreams) into DSpace.
 *
 * @author David Little
 * @author Jay Paz
 */
public class ItemExport extends DSpaceRunnable<ItemExportScriptConfiguration> {

    public static final String TEMP_DIR = "exportSAF";
    public static final String ZIP_NAME = "exportSAFZip";
    public static final String ZIP_FILENAME = "saf-export";
    public static final String ZIP_EXT = "zip";

    protected String typeString = null;
    protected String destDirName = null;
    protected String idString = null;
    protected int seqStart = -1;
    protected int type = -1;
    protected Item item = null;
    protected Collection collection = null;
    protected boolean migrate = false;
    protected boolean zip = false;
    protected String zipFileName = "";
    protected boolean excludeBitstreams = false;
    protected boolean help = false;

    protected static HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
    protected static ItemService itemService = ContentServiceFactory.getInstance().getItemService();
    protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
    protected static final EPersonService epersonService =
        EPersonServiceFactory.getInstance().getEPersonService();

    @Override
    public ItemExportScriptConfiguration getScriptConfiguration() {
        return new DSpace().getServiceManager()
                           .getServiceByName("export", ItemExportScriptConfiguration.class);
    }

    @Override
    public void setup() throws ParseException {
        help = commandLine.hasOption('h');

        if (commandLine.hasOption('t')) { // type
            typeString = commandLine.getOptionValue('t');

            if ("ITEM".equals(typeString)) {
                type = Constants.ITEM;
            } else if ("COLLECTION".equals(typeString)) {
                type = Constants.COLLECTION;
            }
        }

        if (commandLine.hasOption('i')) { // id
            idString = commandLine.getOptionValue('i');
        }

        setNumber();

        if (commandLine.hasOption('m')) { // migrate
            migrate = true;
        }

        if (commandLine.hasOption('x')) {
            excludeBitstreams = true;
        }
    }

    @Override
    public void internalRun() throws Exception {
        if (help) {
            printHelp();
            return;
        }

        validate();

        Context context = new Context();
        context.turnOffAuthorisationSystem();

        if (type == Constants.ITEM) {
            // first, is myIDString a handle?
            if (idString.indexOf('/') != -1) {
                item = (Item) handleService.resolveToObject(context, idString);

                if ((item == null) || (item.getType() != Constants.ITEM)) {
                    item = null;
                }
            } else {
                item = itemService.find(context, UUID.fromString(idString));
            }

            if (item == null) {
                handler.logError("The item cannot be found: " + idString + " (run with -h flag for details)");
                throw new UnsupportedOperationException("The item cannot be found: " + idString);
            }
        } else {
            if (idString.indexOf('/') != -1) {
                // has a / must be a handle
                collection = (Collection) handleService.resolveToObject(context,
                                                                        idString);

                // ensure it's a collection
                if ((collection == null)
                    || (collection.getType() != Constants.COLLECTION)) {
                    collection = null;
                }
            } else {
                collection = collectionService.find(context, UUID.fromString(idString));
            }

            if (collection == null) {
                handler.logError("The collection cannot be found: " + idString + " (run with -h flag for details)");
                throw new UnsupportedOperationException("The collection cannot be found: " + idString);
            }
        }

        ItemExportService itemExportService = ItemExportServiceFactory.getInstance()
                                                                      .getItemExportService();
        try {
            itemExportService.setHandler(handler);
            process(context, itemExportService);
            context.complete();
        } catch (Exception e) {
            context.abort();
            throw new Exception(e);
        }
    }

    /**
     * Validate the options
     */
    protected void validate() {
        if (type == -1) {
            handler.logError("The type must be either COLLECTION or ITEM (run with -h flag for details)");
            throw new UnsupportedOperationException("The type must be either COLLECTION or ITEM");
        }

        if (idString == null) {
            handler.logError("The ID must be set to either a database ID or a handle (run with -h flag for details)");
            throw new UnsupportedOperationException("The ID must be set to either a database ID or a handle");
        }
    }

    /**
     * Process the export
     * @param context
     * @throws Exception
     */
    protected void process(Context context, ItemExportService itemExportService) throws Exception {
        setEPerson(context);
        setDestDirName(context, itemExportService);
        setZip(context);

        Iterator<Item> items;
        if (item != null) {
            List<Item> myItems = new ArrayList<>();
            myItems.add(item);
            items = myItems.iterator();
        } else {
            handler.logInfo("Exporting from collection: " + idString);
            items = itemService.findByCollection(context, collection);
        }
        itemExportService.exportAsZip(context, items, destDirName, zipFileName,
                                      seqStart, migrate, excludeBitstreams);

        File zip = new File(destDirName + System.getProperty("file.separator") + zipFileName);
        try (InputStream is = new FileInputStream(zip)) {
            // write input stream on handler
            handler.writeFilestream(context, ZIP_FILENAME + "." + ZIP_EXT, is, ZIP_NAME);
        } finally {
            PathUtils.deleteDirectory(Path.of(destDirName));
        }
    }

    /**
     * Set the destination directory option
     */
    protected void setDestDirName(Context context, ItemExportService itemExportService) throws Exception {
        destDirName = itemExportService.getExportWorkDirectory() + File.separator + TEMP_DIR;
    }

    /**
     * Set the zip option
     */
    protected void setZip(Context context) {
        zip = true;
        zipFileName = ZIP_FILENAME + "-" + context.getCurrentUser().getID() + "." + ZIP_EXT;
    }

    /**
     * Set the number option
     */
    protected void setNumber() {
        seqStart = 1;
        if (commandLine.hasOption('n')) { // number
            seqStart = Integer.parseInt(commandLine.getOptionValue('n'));
        }
    }

    private void setEPerson(Context context) throws SQLException {
        EPerson myEPerson = epersonService.find(context, this.getEpersonIdentifier());

        // check eperson
        if (myEPerson == null) {
            handler.logError("EPerson cannot be found: " + this.getEpersonIdentifier());
            throw new UnsupportedOperationException("EPerson cannot be found: " + this.getEpersonIdentifier());
        }

        context.setCurrentUser(myEPerson);
    }
}
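The run-on package description in the javadoc above is easier to read as a tree; the names follow the javadoc, with "42" standing in for the per-item directory:

    exportdir/
    └── 42/                  one directory per item
        ├── dublin_core.xml  qualified Dublin Core (RDF schema)
        ├── contents         text file listing one exported file per line
        ├── file1            files contained in the item
        └── file2 ...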
@@ -1,96 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.itemexport;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.content.Item;
import org.dspace.core.Context;

/**
 * CLI variant for the {@link ItemExport} class.
 * This exists to specify the behaviors particular to the CLI.
 *
 * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com)
 */
public class ItemExportCLI extends ItemExport {

    @Override
    protected void validate() {
        super.validate();

        setDestDirName();

        if (destDirName == null) {
            handler.logError("The destination directory must be set (run with -h flag for details)");
            throw new UnsupportedOperationException("The destination directory must be set");
        }

        if (seqStart == -1) {
            handler.logError("The sequence start number must be set (run with -h flag for details)");
            throw new UnsupportedOperationException("The sequence start number must be set");
        }
    }

    @Override
    protected void process(Context context, ItemExportService itemExportService) throws Exception {
        setZip(context);

        if (zip) {
            Iterator<Item> items;
            if (item != null) {
                List<Item> myItems = new ArrayList<>();
                myItems.add(item);
                items = myItems.iterator();
            } else {
                handler.logInfo("Exporting from collection: " + idString);
                items = itemService.findByCollection(context, collection);
            }
            itemExportService.exportAsZip(context, items, destDirName, zipFileName,
                                          seqStart, migrate, excludeBitstreams);
        } else {
            if (item != null) {
                // it's only a single item
                itemExportService
                    .exportItem(context, Collections.singletonList(item).iterator(), destDirName,
                                seqStart, migrate, excludeBitstreams);
            } else {
                handler.logInfo("Exporting from collection: " + idString);

                // it's a collection, so do a bunch of items
                Iterator<Item> i = itemService.findByCollection(context, collection);
                itemExportService.exportItem(context, i, destDirName, seqStart, migrate, excludeBitstreams);
            }
        }
    }

    protected void setDestDirName() {
        if (commandLine.hasOption('d')) { // dest
            destDirName = commandLine.getOptionValue('d');
        }
    }

    @Override
    protected void setZip(Context context) {
        if (commandLine.hasOption('z')) {
            zip = true;
            zipFileName = commandLine.getOptionValue('z');
        }
    }

    @Override
    protected void setNumber() {
        if (commandLine.hasOption('n')) { // number
            seqStart = Integer.parseInt(commandLine.getOptionValue('n'));
        }
    }
}
@@ -1,56 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.itemexport;

import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.dspace.scripts.configuration.ScriptConfiguration;

/**
 * The {@link ScriptConfiguration} for the {@link ItemExportCLI} script
 *
 * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com)
 */
public class ItemExportCLIScriptConfiguration extends ItemExportScriptConfiguration<ItemExportCLI> {

    @Override
    public Options getOptions() {
        Options options = new Options();

        options.addOption(Option.builder("t").longOpt("type")
                                .desc("type: COLLECTION or ITEM")
                                .hasArg().required().build());
        options.addOption(Option.builder("i").longOpt("id")
                                .desc("ID or handle of thing to export")
                                .hasArg().required().build());
        options.addOption(Option.builder("d").longOpt("dest")
                                .desc("destination where you want items to go")
                                .hasArg().required().build());
        options.addOption(Option.builder("n").longOpt("number")
                                .desc("sequence number to begin exporting items with")
                                .hasArg().required().build());
        options.addOption(Option.builder("z").longOpt("zip")
                                .desc("export as zip file (specify filename e.g. export.zip)")
                                .hasArg().required(false).build());
        options.addOption(Option.builder("m").longOpt("migrate")
                                .desc("export for migration (remove handle and metadata that will be re-created in new system)")
                                .hasArg(false).required(false).build());

        // as pointed out by Peter Dietz this provides similar functionality to export metadata
        // but it is needed since it directly exports to Simple Archive Format (SAF)
        options.addOption(Option.builder("x").longOpt("exclude-bitstreams")
                                .desc("do not export bitstreams")
                                .hasArg(false).required(false).build());

        options.addOption(Option.builder("h").longOpt("help")
                                .desc("help")
                                .hasArg(false).required(false).build());

        return options;
    }
}
@@ -0,0 +1,246 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.itemexport;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.app.itemexport.factory.ItemExportServiceFactory;
import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;

/**
 * Item exporter to create simple AIPs for DSpace content. Currently exports
 * individual items, or entire collections. For instructions on use, see
 * printUsage() method.
 * <P>
 * ItemExport creates the simple AIP package that the importer also uses. It
 * consists of:
 * <P>
 * /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin
 * core in RDF schema / contents - text file, listing one file per line / file1
 * - files contained in the item / file2 / ...
 * <P>
 * issues -doesn't handle special characters in metadata (needs to turn {@code &'s} into
 * {@code &}, etc.)
 * <P>
 * Modified by David Little, UCSD Libraries 12/21/04 to allow the registration
 * of files (bitstreams) into DSpace.
 *
 * @author David Little
 * @author Jay Paz
 */
public class ItemExportCLITool {

    protected static ItemExportService itemExportService = ItemExportServiceFactory.getInstance()
                                                                                   .getItemExportService();
    protected static HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
    protected static ItemService itemService = ContentServiceFactory.getInstance().getItemService();
    protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();

    /**
     * Default constructor
     */
    private ItemExportCLITool() { }

    /*
     *
     */
    public static void main(String[] argv) throws Exception {
        // create an options object and populate it
        CommandLineParser parser = new PosixParser();

        Options options = new Options();

        options.addOption("t", "type", true, "type: COLLECTION or ITEM");
        options.addOption("i", "id", true, "ID or handle of thing to export");
        options.addOption("d", "dest", true,
                          "destination where you want items to go");
        options.addOption("m", "migrate", false,
                          "export for migration (remove handle and metadata that will be re-created in new system)");
        options.addOption("n", "number", true,
                          "sequence number to begin exporting items with");
        options.addOption("z", "zip", true, "export as zip file (specify filename e.g. export.zip)");
        options.addOption("h", "help", false, "help");

        // as pointed out by Peter Dietz this provides similar functionality to export metadata
        // but it is needed since it directly exports to Simple Archive Format (SAF)
        options.addOption("x", "exclude-bitstreams", false, "do not export bitstreams");

        CommandLine line = parser.parse(options, argv);

        String typeString = null;
        String destDirName = null;
        String myIDString = null;
        int seqStart = -1;
        int myType = -1;

        Item myItem = null;
        Collection mycollection = null;

        if (line.hasOption('h')) {
            HelpFormatter myhelp = new HelpFormatter();
            myhelp.printHelp("ItemExport\n", options);
            System.out
                .println("\nfull collection: ItemExport -t COLLECTION -i ID -d dest -n number");
            System.out
                .println("singleitem: ItemExport -t ITEM -i ID -d dest -n number");

            System.exit(0);
        }

        if (line.hasOption('t')) { // type
            typeString = line.getOptionValue('t');

            if ("ITEM".equals(typeString)) {
                myType = Constants.ITEM;
            } else if ("COLLECTION".equals(typeString)) {
                myType = Constants.COLLECTION;
            }
        }

        if (line.hasOption('i')) { // id
            myIDString = line.getOptionValue('i');
        }

        if (line.hasOption('d')) { // dest
            destDirName = line.getOptionValue('d');
        }

        if (line.hasOption('n')) { // number
            seqStart = Integer.parseInt(line.getOptionValue('n'));
        }

        boolean migrate = false;
        if (line.hasOption('m')) { // migrate
            migrate = true;
        }

        boolean zip = false;
        String zipFileName = "";
        if (line.hasOption('z')) {
            zip = true;
            zipFileName = line.getOptionValue('z');
        }

        boolean excludeBitstreams = false;
        if (line.hasOption('x')) {
            excludeBitstreams = true;
        }

        // now validate the args
        if (myType == -1) {
            System.out
                .println("type must be either COLLECTION or ITEM (-h for help)");
            System.exit(1);
        }

        if (destDirName == null) {
            System.out
                .println("destination directory must be set (-h for help)");
            System.exit(1);
        }

        if (seqStart == -1) {
            System.out
                .println("sequence start number must be set (-h for help)");
            System.exit(1);
        }

        if (myIDString == null) {
            System.out
                .println("ID must be set to either a database ID or a handle (-h for help)");
            System.exit(1);
        }

        Context c = new Context(Context.Mode.READ_ONLY);
        c.turnOffAuthorisationSystem();

        if (myType == Constants.ITEM) {
            // first, is myIDString a handle?
            if (myIDString.indexOf('/') != -1) {
                myItem = (Item) handleService.resolveToObject(c, myIDString);

                if ((myItem == null) || (myItem.getType() != Constants.ITEM)) {
                    myItem = null;
                }
            } else {
                myItem = itemService.find(c, UUID.fromString(myIDString));
            }

            if (myItem == null) {
                System.out
                    .println("Error, item cannot be found: " + myIDString);
            }
        } else {
            if (myIDString.indexOf('/') != -1) {
                // has a / must be a handle
                mycollection = (Collection) handleService.resolveToObject(c,
                                                                          myIDString);

                // ensure it's a collection
                if ((mycollection == null)
                    || (mycollection.getType() != Constants.COLLECTION)) {
                    mycollection = null;
                }
            } else if (myIDString != null) {
                mycollection = collectionService.find(c, UUID.fromString(myIDString));
            }

            if (mycollection == null) {
                System.out.println("Error, collection cannot be found: "
                                       + myIDString);
                System.exit(1);
            }
        }

        if (zip) {
            Iterator<Item> items;
            if (myItem != null) {
                List<Item> myItems = new ArrayList<>();
                myItems.add(myItem);
                items = myItems.iterator();
            } else {
                System.out.println("Exporting from collection: " + myIDString);
                items = itemService.findByCollection(c, mycollection);
            }
            itemExportService.exportAsZip(c, items, destDirName, zipFileName, seqStart, migrate, excludeBitstreams);
        } else {
            if (myItem != null) {
                // it's only a single item
                itemExportService
                    .exportItem(c, Collections.singletonList(myItem).iterator(), destDirName, seqStart, migrate,
|
||||
excludeBitstreams);
|
||||
} else {
|
||||
System.out.println("Exporting from collection: " + myIDString);
|
||||
|
||||
// it's a collection, so do a bunch of items
|
||||
Iterator<Item> i = itemService.findByCollection(c, mycollection);
|
||||
itemExportService.exportItem(c, i, destDirName, seqStart, migrate, excludeBitstreams);
|
||||
}
|
||||
}
|
||||
|
||||
c.complete();
|
||||
}
|
||||
}
|
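For orientation, here is a minimal sketch (not part of the diff) of driving the same export programmatically. It uses only the service calls the tool above makes itself; the handle, destination directory, and sequence-start value are hypothetical placeholders, and a running DSpace kernel is assumed.

    // Sketch: export one item by handle, assuming a running DSpace kernel.
    Context c = new Context(Context.Mode.READ_ONLY);
    c.turnOffAuthorisationSystem();
    Item item = (Item) HandleServiceFactory.getInstance().getHandleService()
            .resolveToObject(c, "123456789/42");          // hypothetical handle
    if (item != null && item.getType() == Constants.ITEM) {
        ItemExportServiceFactory.getInstance().getItemExportService()
                .exportItem(c, Collections.singletonList(item).iterator(),
                        "/tmp/exportdir", 1, false, false); // seqStart 1, no migrate, keep bitstreams
    }
    c.complete();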
@@ -1,79 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.itemexport;

import java.sql.SQLException;

import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * The {@link ScriptConfiguration} for the {@link ItemExport} script
 *
 * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com)
 */
public class ItemExportScriptConfiguration<T extends ItemExport> extends ScriptConfiguration<T> {

    @Autowired
    private AuthorizeService authorizeService;

    private Class<T> dspaceRunnableClass;

    @Override
    public Class<T> getDspaceRunnableClass() {
        return dspaceRunnableClass;
    }

    @Override
    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
        this.dspaceRunnableClass = dspaceRunnableClass;
    }

    @Override
    public boolean isAllowedToExecute(final Context context) {
        try {
            return authorizeService.isAdmin(context);
        } catch (SQLException e) {
            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
        }
    }

    @Override
    public Options getOptions() {
        Options options = new Options();

        options.addOption(Option.builder("t").longOpt("type")
                .desc("type: COLLECTION or ITEM")
                .hasArg().required().build());
        options.addOption(Option.builder("i").longOpt("id")
                .desc("ID or handle of thing to export")
                .hasArg().required().build());
        options.addOption(Option.builder("n").longOpt("number")
                .desc("sequence number to begin exporting items with")
                .hasArg().required(false).build());
        options.addOption(Option.builder("m").longOpt("migrate")
                .desc("export for migration (remove handle and metadata that will be re-created in new system)")
                .hasArg(false).required(false).build());

        // as pointed out by Peter Dietz this provides similar functionality to export metadata
        // but it is needed since it directly exports to Simple Archive Format (SAF)
        options.addOption(Option.builder("x").longOpt("exclude-bitstreams")
                .desc("do not export bitstreams")
                .hasArg(false).required(false).build());

        options.addOption(Option.builder("h").longOpt("help")
                .desc("help")
                .hasArg(false).required(false).build());

        return options;
    }
}
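As a rough illustration (not part of the diff), the Options built by getOptions() above behave as ordinary commons-cli options. In DSpace this configuration is normally instantiated and injected by Spring; constructing it directly below is only for demonstration, and the argument vector is hypothetical.

    // Sketch; needs org.apache.commons.cli.{CommandLine, DefaultParser, Options, ParseException}.
    static void demoParse() throws ParseException {
        Options options = new ItemExportScriptConfiguration<ItemExport>().getOptions();
        CommandLine line = new DefaultParser().parse(options,
                new String[] {"-t", "COLLECTION", "-i", "123456789/7", "-n", "1"}); // hypothetical args
        String type = line.getOptionValue('t');   // "COLLECTION"
        boolean migrate = line.hasOption('m');    // false for this argument vector
    }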
@@ -16,7 +16,6 @@ import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
@@ -33,8 +32,8 @@ import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import javax.mail.MessagingException;

import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
@@ -43,43 +42,38 @@ import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.Email;
import org.dspace.core.I18nUtil;
import org.dspace.core.LogHelper;
import org.dspace.core.LogManager;
import org.dspace.core.Utils;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.service.HandleService;
import org.dspace.scripts.handler.DSpaceRunnableHandler;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Item exporter to create simple AIPs for DSpace content. Currently exports
 * individual items, or entire collections. For instructions on use, see
 * printUsage() method.
 * <p>
 * <P>
 * ItemExport creates the simple AIP package that the importer also uses. It
 * consists of:
 * <pre>{@code
 * /exportdir/42/ (one directory per item)
 * / dublin_core.xml - qualified dublin core in RDF schema
 * / contents - text file, listing one file per line
 * / file1 - files contained in the item
 * / file2
 * / ...
 * }</pre>
 * <p>
 * <P>
 * /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin
 * core in RDF schema / contents - text file, listing one file per line / file1
 * - files contained in the item / file2 / ...
 * <P>
 * issues - doesn't handle special characters in metadata (needs to turn {@code &'s} into
 * {@code &amp;}, etc.)
 * <p>
 * <P>
 * Modified by David Little, UCSD Libraries 12/21/04 to allow the registration
 * of files (bitstreams) into DSpace.
 *
@@ -99,15 +93,12 @@ public class ItemExportServiceImpl implements ItemExportService {
    protected ItemService itemService;
    @Autowired(required = true)
    protected HandleService handleService;
    @Autowired(required = true)
    protected ConfigurationService configurationService;


    /**
     * log4j logger
     */
    private final Logger log = org.apache.logging.log4j.LogManager.getLogger();

    private DSpaceRunnableHandler handler;
    private Logger log = Logger.getLogger(ItemExportServiceImpl.class);

    protected ItemExportServiceImpl() {

@@ -132,11 +123,11 @@
            }
        }

        logInfo("Beginning export");
        System.out.println("Beginning export");

        while (i.hasNext()) {
            if (SUBDIR_LIMIT > 0 && ++counter == SUBDIR_LIMIT) {
                subdir = Integer.toString(subDirSuffix++);
                subdir = Integer.valueOf(subDirSuffix++).toString();
                fullPath = destDirName + File.separatorChar + subdir;
                counter = 0;

@@ -145,7 +136,7 @@
            }
        }

        logInfo("Exporting item to " + mySequenceNumber);
        System.out.println("Exporting item to " + mySequenceNumber);
        Item item = i.next();
        exportItem(c, item, fullPath, mySequenceNumber, migrate, excludeBitstreams);
        c.uncacheEntity(item);
@@ -161,7 +152,7 @@
        // now create a subdirectory
        File itemDir = new File(destDir + "/" + seqStart);

        logInfo("Exporting Item " + myItem.getID() +
        System.out.println("Exporting Item " + myItem.getID() +
                (myItem.getHandle() != null ? ", handle " + myItem.getHandle() : "") +
                " to " + itemDir);

@@ -174,7 +165,6 @@
        // make it this far, now start exporting
        writeMetadata(c, myItem, itemDir, migrate);
        writeBitstreams(c, myItem, itemDir, excludeBitstreams);
        writeCollections(myItem, itemDir);
        if (!migrate) {
            writeHandle(c, myItem, itemDir);
        }
@@ -199,7 +189,7 @@
     */
    protected void writeMetadata(Context c, Item i, File destDir, boolean migrate)
        throws Exception {
        Set<String> schemas = new HashSet<>();
        Set<String> schemas = new HashSet<String>();
        List<MetadataValue> dcValues = itemService.getMetadata(i, Item.ANY, Item.ANY, Item.ANY, Item.ANY);
        for (MetadataValue metadataValue : dcValues) {
            schemas.add(metadataValue.getMetadataField().getMetadataSchema().getName());
@@ -224,7 +214,7 @@
    protected void writeMetadata(Context c, String schema, Item i,
            File destDir, boolean migrate) throws Exception {
        String filename;
        if (schema.equals(MetadataSchemaEnum.DC.getName())) {
        if (schema.equals(MetadataSchema.DC_SCHEMA)) {
            filename = "dublin_core.xml";
        } else {
            filename = "metadata_" + schema + ".xml";
@@ -232,7 +222,7 @@

        File outFile = new File(destDir, filename);

        logInfo("Attempting to create file " + outFile);
        System.out.println("Attempting to create file " + outFile);

        if (outFile.createNewFile()) {
            BufferedOutputStream out = new BufferedOutputStream(
@@ -275,14 +265,15 @@
                + Utils.addEntities(dcv.getValue()) + "</dcvalue>\n")
                .getBytes("UTF-8");

            if (!migrate ||
            if ((!migrate) ||
                (migrate && !(
                    ("date".equals(metadataField.getElement()) && "issued".equals(qualifier)) ||
                    ("date".equals(metadataField.getElement()) && "accessioned".equals(qualifier)) ||
                    ("date".equals(metadataField.getElement()) && "available".equals(qualifier)) ||
                    ("identifier".equals(metadataField.getElement()) && "uri".equals(qualifier) &&
                        (dcv.getValue() != null && dcv.getValue().startsWith(
                            handleService.getCanonicalPrefix() + handleService.getPrefix() + "/"))) ||
                    (dcv.getValue() != null && dcv.getValue().startsWith("http://hdl.handle.net/" +
                        handleService
                            .getPrefix() + "/"))) ||
                    ("description".equals(metadataField.getElement()) && "provenance".equals(qualifier)) ||
                    ("format".equals(metadataField.getElement()) && "extent".equals(qualifier)) ||
                    ("format".equals(metadataField.getElement()) && "mimetype".equals(qualifier))))) {
@@ -300,10 +291,10 @@
            }

            // When migrating, only keep date.issued if it is different to date.accessioned
            if (migrate &&
            if ((migrate) &&
                (dateIssued != null) &&
                (dateAccessioned != null) &&
                !dateIssued.equals(dateAccessioned)) {
                (!dateIssued.equals(dateAccessioned))) {
                utf8 = (" <dcvalue element=\"date\" "
                    + "qualifier=\"issued\">"
                    + Utils.addEntities(dateIssued) + "</dcvalue>\n")
@@ -338,7 +329,7 @@
        File outFile = new File(destDir, filename);

        if (outFile.createNewFile()) {
            PrintWriter out = new PrintWriter(new FileWriter(outFile, StandardCharsets.UTF_8));
            PrintWriter out = new PrintWriter(new FileWriter(outFile));

            out.println(i.getHandle());

@@ -350,33 +341,6 @@
        }
    }

    /**
     * Create the 'collections' file. List handles of all Collections which
     * contain this Item. The "owning" Collection is listed first.
     *
     * @param item list collections holding this Item.
     * @param destDir write the file here.
     * @throws IOException if the file cannot be created or written.
     */
    protected void writeCollections(Item item, File destDir)
        throws IOException {
        File outFile = new File(destDir, "collections");
        if (outFile.createNewFile()) {
            try (PrintWriter out = new PrintWriter(new FileWriter(outFile))) {
                String ownerHandle = item.getOwningCollection().getHandle();
                out.println(ownerHandle);
                for (Collection collection : item.getCollections()) {
                    String collectionHandle = collection.getHandle();
                    if (!collectionHandle.equals(ownerHandle)) {
                        out.println(collectionHandle);
                    }
                }
            }
        } else {
            throw new IOException("Cannot create 'collections' in " + destDir);
        }
    }

    /**
     * Create both the bitstreams and the contents file. Any bitstreams that
     * were originally registered will be marked in the contents file as such.
@@ -395,7 +359,7 @@
        File outFile = new File(destDir, "contents");

        if (outFile.createNewFile()) {
            PrintWriter out = new PrintWriter(new FileWriter(outFile, StandardCharsets.UTF_8));
            PrintWriter out = new PrintWriter(new FileWriter(outFile));

            List<Bundle> bundles = i.getBundles();

@@ -433,7 +397,7 @@
            File fdirs = new File(destDir + File.separator
                + dirs);
            if (!fdirs.exists() && !fdirs.mkdirs()) {
                logError("Unable to create destination directory");
                log.error("Unable to create destination directory");
            }
        }

@@ -490,12 +454,12 @@

        File wkDir = new File(workDir);
        if (!wkDir.exists() && !wkDir.mkdirs()) {
            logError("Unable to create working direcory");
            log.error("Unable to create working direcory");
        }

        File dnDir = new File(destDirName);
        if (!dnDir.exists() && !dnDir.mkdirs()) {
            logError("Unable to create destination directory");
            log.error("Unable to create destination directory");
        }

        // export the items using normal export method
@@ -509,7 +473,7 @@
    public void createDownloadableExport(DSpaceObject dso,
            Context context, boolean migrate) throws Exception {
        EPerson eperson = context.getCurrentUser();
        ArrayList<DSpaceObject> list = new ArrayList<>(1);
        ArrayList<DSpaceObject> list = new ArrayList<DSpaceObject>(1);
        list.add(dso);
        processDownloadableExport(list, context, eperson == null ? null
            : eperson.getEmail(), migrate);
@@ -526,7 +490,7 @@
    @Override
    public void createDownloadableExport(DSpaceObject dso,
            Context context, String additionalEmail, boolean migrate) throws Exception {
        ArrayList<DSpaceObject> list = new ArrayList<>(1);
        ArrayList<DSpaceObject> list = new ArrayList<DSpaceObject>(1);
        list.add(dso);
        processDownloadableExport(list, context, additionalEmail, migrate);
    }
@@ -583,7 +547,7 @@
                List<Bitstream> bitstreams = bundle.getBitstreams();
                for (Bitstream bitstream : bitstreams) {
                    // add up the size
                    size += bitstream.getSizeBytes();
                    size += bitstream.getSize();
                }
            }
            items.add(item.getID());
@@ -610,7 +574,7 @@
                List<Bitstream> bitstreams = bundle.getBitstreams();
                for (Bitstream bitstream : bitstreams) {
                    // add up the size
                    size += bitstream.getSizeBytes();
                    size += bitstream.getSize();
                }
            }
            items.add(item.getID());
@@ -629,7 +593,7 @@
                List<Bitstream> bitstreams = bundle.getBitstreams();
                for (Bitstream bitstream : bitstreams) {
                    // add up the size
                    size += bitstream.getSizeBytes();
                    size += bitstream.getSize();
                }
            }
            ArrayList<UUID> items = new ArrayList<>();
@@ -642,7 +606,7 @@

        // check the size of all the bitstreams against the configuration file
        // entry if it exists
        String megaBytes = configurationService
        String megaBytes = ConfigurationManager
            .getProperty("org.dspace.app.itemexport.max.size");
        if (megaBytes != null) {
            float maxSize = 0;
@@ -664,9 +628,11 @@
        Thread go = new Thread() {
            @Override
            public void run() {
                Context context = new Context();
                Context context = null;
                Iterator<Item> iitems = null;
                try {
                    // create a new dspace context
                    context = new Context();
                    // ignore auths
                    context.turnOffAuthorisationSystem();

@@ -678,14 +644,14 @@
                    String downloadDir = getExportDownloadDirectory(eperson);
                    File dnDir = new File(downloadDir);
                    if (!dnDir.exists() && !dnDir.mkdirs()) {
                        logError("Unable to create download directory");
                        log.error("Unable to create download directory");
                    }

                    Iterator<String> iter = itemsMap.keySet().iterator();
                    while (iter.hasNext()) {
                        String keyName = iter.next();
                        List<UUID> uuids = itemsMap.get(keyName);
                        List<Item> items = new ArrayList<>();
                        List<Item> items = new ArrayList<Item>();
                        for (UUID uuid : uuids) {
                            items.add(itemService.find(context, uuid));
                        }
@@ -697,7 +663,7 @@

                    File wkDir = new File(workDir);
                    if (!wkDir.exists() && !wkDir.mkdirs()) {
                        logError("Unable to create working directory");
                        log.error("Unable to create working directory");
                    }

@@ -765,7 +731,7 @@
    @Override
    public String getExportDownloadDirectory(EPerson ePerson)
        throws Exception {
        String downloadDir = configurationService
        String downloadDir = ConfigurationManager
            .getProperty("org.dspace.app.itemexport.download.dir");
        if (downloadDir == null) {
            throw new Exception(
@@ -782,14 +748,13 @@

    @Override
    public String getExportWorkDirectory() throws Exception {
        String exportDir = configurationService
        String exportDir = ConfigurationManager
            .getProperty("org.dspace.app.itemexport.work.dir");
        if (exportDir == null) {
            throw new Exception(
                "A dspace.cfg entry for 'org.dspace.app.itemexport.work.dir' does not exist.");
        }
        // clean work dir path from duplicate separators
        return StringUtils.replace(exportDir, File.separator + File.separator, File.separator);
        return exportDir;
    }

    @Override
@@ -889,7 +854,7 @@
            return null;
        }

        List<String> fileNames = new ArrayList<>();
        List<String> fileNames = new ArrayList<String>();

        for (String fileName : downloadDir.list()) {
            if (fileName.contains("export") && fileName.endsWith(".zip")) {
@@ -906,18 +871,18 @@

    @Override
    public void deleteOldExportArchives(EPerson eperson) throws Exception {
        int hours = configurationService
        int hours = ConfigurationManager
            .getIntProperty("org.dspace.app.itemexport.life.span.hours");
        Calendar now = Calendar.getInstance();
        now.setTime(new Date());
        now.add(Calendar.HOUR, -hours);
        now.add(Calendar.HOUR, (-hours));
        File downloadDir = new File(getExportDownloadDirectory(eperson));
        if (downloadDir.exists()) {
            File[] files = downloadDir.listFiles();
            for (File file : files) {
                if (file.lastModified() < now.getTimeInMillis()) {
                    if (!file.delete()) {
                        logError("Unable to delete export file");
                        log.error("Unable to delete export file");
                    }
                }
            }
@@ -927,11 +892,11 @@

    @Override
    public void deleteOldExportArchives() throws Exception {
        int hours = configurationService.getIntProperty("org.dspace.app.itemexport.life.span.hours");
        int hours = ConfigurationManager.getIntProperty("org.dspace.app.itemexport.life.span.hours");
        Calendar now = Calendar.getInstance();
        now.setTime(new Date());
        now.add(Calendar.HOUR, -hours);
        File downloadDir = new File(configurationService.getProperty("org.dspace.app.itemexport.download.dir"));
        now.add(Calendar.HOUR, (-hours));
        File downloadDir = new File(ConfigurationManager.getProperty("org.dspace.app.itemexport.download.dir"));
        if (downloadDir.exists()) {
            // Get a list of all the sub-directories, potentially one for each ePerson.
            File[] dirs = downloadDir.listFiles();
@@ -941,7 +906,7 @@
                    for (File file : files) {
                        if (file.lastModified() < now.getTimeInMillis()) {
                            if (!file.delete()) {
                                logError("Unable to delete old files");
                                log.error("Unable to delete old files");
                            }
                        }
                    }
@@ -949,7 +914,7 @@
                // If the directory is now empty then we delete it too.
                if (dir.listFiles().length == 0) {
                    if (!dir.delete()) {
                        logError("Unable to delete directory");
                        log.error("Unable to delete directory");
                    }
                }
            }
@@ -965,29 +930,29 @@
            Locale supportedLocale = I18nUtil.getEPersonLocale(eperson);
            Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "export_success"));
            email.addRecipient(eperson.getEmail());
            email.addArgument(configurationService.getProperty("dspace.ui.url") + "/exportdownload/" + fileName);
            email.addArgument(configurationService.getProperty("org.dspace.app.itemexport.life.span.hours"));
            email.addArgument(ConfigurationManager.getProperty("dspace.url") + "/exportdownload/" + fileName);
            email.addArgument(ConfigurationManager.getProperty("org.dspace.app.itemexport.life.span.hours"));

            email.send();
        } catch (Exception e) {
            logWarn(LogHelper.getHeader(context, "emailSuccessMessage", "cannot notify user of export"), e);
            log.warn(LogManager.getHeader(context, "emailSuccessMessage", "cannot notify user of export"), e);
        }
    }

    @Override
    public void emailErrorMessage(EPerson eperson, String error)
        throws MessagingException {
        logWarn("An error occurred during item export, the user will be notified. " + error);
        log.warn("An error occurred during item export, the user will be notified. " + error);
        try {
            Locale supportedLocale = I18nUtil.getEPersonLocale(eperson);
            Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "export_error"));
            email.addRecipient(eperson.getEmail());
            email.addArgument(error);
            email.addArgument(configurationService.getProperty("dspace.ui.url") + "/feedback");
            email.addArgument(ConfigurationManager.getProperty("dspace.url") + "/feedback");

            email.send();
        } catch (Exception e) {
            logWarn("error during item export error notification", e);
            log.warn("error during item export error notification", e);
        }
    }

@@ -1002,7 +967,7 @@
        }
        File targetFile = new File(tempFileName);
        if (!targetFile.createNewFile()) {
            logWarn("Target file already exists: " + targetFile.getName());
            log.warn("Target file already exists: " + targetFile.getName());
        }

        FileOutputStream fos = new FileOutputStream(tempFileName);
@@ -1018,7 +983,7 @@

            deleteDirectory(cpFile);
            if (!targetFile.renameTo(new File(target))) {
                logError("Unable to rename file");
                log.error("Unable to rename file");
            }
        } finally {
            if (cpZipOutputStream != null) {
@@ -1051,11 +1016,8 @@
                return;
            }
            String strAbsPath = cpFile.getPath();
            int startIndex = strSource.length();
            if (!StringUtils.endsWith(strSource, File.separator)) {
                startIndex++;
            }
            String strZipEntryName = strAbsPath.substring(startIndex, strAbsPath.length());
            String strZipEntryName = strAbsPath.substring(strSource
                .length() + 1, strAbsPath.length());

            // byte[] b = new byte[ (int)(cpFile.length()) ];

@@ -1094,7 +1056,7 @@
                deleteDirectory(file);
            } else {
                if (!file.delete()) {
                    logError("Unable to delete file: " + file.getName());
                    log.error("Unable to delete file: " + file.getName());
                }
            }
        }
@@ -1103,64 +1065,4 @@
        return (path.delete());
    }

    @Override
    public void setHandler(DSpaceRunnableHandler handler) {
        this.handler = handler;
    }

    private void logInfo(String message) {
        logInfo(message, null);
    }

    private void logInfo(String message, Exception e) {
        if (handler != null) {
            handler.logInfo(message);
            return;
        }

        if (e != null) {
            log.info(message, e);
        } else {
            log.info(message);
        }
    }

    private void logWarn(String message) {
        logWarn(message, null);
    }

    private void logWarn(String message, Exception e) {
        if (handler != null) {
            handler.logWarning(message);
            return;
        }

        if (e != null) {
            log.warn(message, e);
        } else {
            log.warn(message);
        }
    }

    private void logError(String message) {
        logError(message, null);
    }

    private void logError(String message, Exception e) {
        if (handler != null) {
            if (e != null) {
                handler.logError(message, e);
            } else {
                handler.logError(message);
            }
            return;
        }

        if (e != null) {
            log.error(message, e);
        } else {
            log.error(message);
        }
    }

}
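The sprawling condition in the @@ -275 hunk above decides, in migrate mode, which fields to drop because the target system will regenerate them. Restated as a small helper for readability (a sketch only; the element, qualifier, and value arguments correspond to the MetadataField accessors used above, and the handle prefix is passed in rather than fetched from HandleService):

    // Sketch: true when a field should be skipped during a migrate export.
    static boolean regeneratedOnImport(String element, String qualifier, String value, String handlePrefix) {
        return ("date".equals(element) && ("issued".equals(qualifier)
                    || "accessioned".equals(qualifier) || "available".equals(qualifier)))
            || ("identifier".equals(element) && "uri".equals(qualifier)
                    && value != null && value.startsWith(handlePrefix))
            || ("description".equals(element) && "provenance".equals(qualifier))
            || ("format".equals(element) && ("extent".equals(qualifier) || "mimetype".equals(qualifier)));
    }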
@@ -17,7 +17,6 @@ import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.scripts.handler.DSpaceRunnableHandler;

/**
 * Item exporter to create simple AIPs for DSpace content. Currently exports
@@ -268,10 +267,4 @@ public interface ItemExportService {
     */
    public void zip(String strSource, String target) throws Exception;

    /**
     * Set the DSpace Runnable Handler
     * @param handler
     */
    public void setHandler(DSpaceRunnableHandler handler);

}
@@ -0,0 +1,106 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.itemimport;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import gr.ekt.bte.core.DataLoader;
import gr.ekt.bte.core.TransformationEngine;
import gr.ekt.bte.dataloader.FileDataLoader;


/**
 * This class acts as a Service in the procedure to batch import using the Biblio-Transformation-Engine
 */
public class BTEBatchImportService {

    TransformationEngine transformationEngine;
    Map<String, DataLoader> dataLoaders = new HashMap<String, DataLoader>();
    Map<String, String> outputMap = new HashMap<String, String>();

    /**
     * Default constructor
     */
    public BTEBatchImportService() {
        super();
    }

    /**
     * Setter method for dataLoaders parameter
     *
     * @param dataLoaders map of data loaders
     */
    public void setDataLoaders(Map<String, DataLoader> dataLoaders) {
        this.dataLoaders = dataLoaders;
    }

    /**
     * Get data loaders
     *
     * @return the map of DataLoaders
     */
    public Map<String, DataLoader> getDataLoaders() {
        return dataLoaders;
    }

    /**
     * Get output map
     *
     * @return the outputMapping
     */
    public Map<String, String> getOutputMap() {
        return outputMap;
    }

    /**
     * Setter method for the outputMapping
     *
     * @param outputMap the output mapping
     */
    public void setOutputMap(Map<String, String> outputMap) {
        this.outputMap = outputMap;
    }

    /**
     * Get transformation engine
     *
     * @return transformation engine
     */
    public TransformationEngine getTransformationEngine() {
        return transformationEngine;
    }

    /**
     * set transformation engine
     *
     * @param transformationEngine transformation engine
     */
    public void setTransformationEngine(TransformationEngine transformationEngine) {
        this.transformationEngine = transformationEngine;
    }

    /**
     * Getter of file data loaders
     *
     * @return List of file data loaders
     */
    public List<String> getFileDataLoaders() {
        List<String> result = new ArrayList<String>();

        for (String key : dataLoaders.keySet()) {
            DataLoader dl = dataLoaders.get(key);
            if (dl instanceof FileDataLoader) {
                result.add(key);
            }
        }
        return result;
    }
}
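A small sketch (not part of the diff) of how this service is exercised once a loader map is available. In DSpace the map is normally injected via Spring configuration; here it is passed in, and the example key names in the comment are hypothetical.

    // Sketch: given configured loaders, report which keys point at file-based
    // loaders, i.e. the instanceof FileDataLoader check in getFileDataLoaders().
    static List<String> fileBasedKeys(Map<String, DataLoader> configuredLoaders) {
        BTEBatchImportService bte = new BTEBatchImportService();
        bte.setDataLoaders(configuredLoaders);
        return bte.getFileDataLoaders(); // e.g. ["bibtex", "ris"] for a typical config
    }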
@@ -1,374 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.itemimport;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Optional;
import java.util.UUID;

import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.itemimport.factory.ItemImportServiceFactory;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.utils.DSpace;

/**
 * Import items into DSpace. The conventional use is upload files by copying
 * them. DSpace writes the item's bitstreams into its assetstore. Metadata is
 * also loaded to the DSpace database.
 * <P>
 * A second use assumes the bitstream files already exist in a storage
 * resource accessible to DSpace. In this case the bitstreams are 'registered'.
 * That is, the metadata is loaded to the DSpace database and DSpace is given
 * the location of the file which is subsumed into DSpace.
 * <P>
 * The distinction is controlled by the format of lines in the 'contents' file.
 * See comments in processContentsFile() below.
 * <P>
 * Modified by David Little, UCSD Libraries 12/21/04 to
 * allow the registration of files (bitstreams) into DSpace.
 */
public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {

    public static String TEMP_DIR = "importSAF";
    public static String MAPFILE_FILENAME = "mapfile";
    public static String MAPFILE_BITSTREAM_TYPE = "importSAFMapfile";

    protected boolean template = false;
    protected String command = null;
    protected String sourcedir = null;
    protected String mapfile = null;
    protected String eperson = null;
    protected String[] collections = null;
    protected boolean isTest = false;
    protected boolean isResume = false;
    protected boolean useWorkflow = false;
    protected boolean useWorkflowSendEmail = false;
    protected boolean isQuiet = false;
    protected boolean commandLineCollections = false;
    protected boolean zip = false;
    protected String zipfilename = null;
    protected boolean help = false;
    protected File workDir = null;
    private File workFile = null;

    protected static final CollectionService collectionService =
        ContentServiceFactory.getInstance().getCollectionService();
    protected static final EPersonService epersonService =
        EPersonServiceFactory.getInstance().getEPersonService();
    protected static final HandleService handleService =
        HandleServiceFactory.getInstance().getHandleService();

    @Override
    public ItemImportScriptConfiguration getScriptConfiguration() {
        return new DSpace().getServiceManager()
                .getServiceByName("import", ItemImportScriptConfiguration.class);
    }

    @Override
    public void setup() throws ParseException {
        help = commandLine.hasOption('h');

        if (commandLine.hasOption('a')) {
            command = "add";
        }

        if (commandLine.hasOption('r')) {
            command = "replace";
        }

        if (commandLine.hasOption('d')) {
            command = "delete";
        }

        if (commandLine.hasOption('w')) {
            useWorkflow = true;
            if (commandLine.hasOption('n')) {
                useWorkflowSendEmail = true;
            }
        }

        if (commandLine.hasOption('v')) {
            isTest = true;
            handler.logInfo("**Test Run** - not actually importing items.");
        }

        if (commandLine.hasOption('p')) {
            template = true;
        }

        if (commandLine.hasOption('c')) { // collections
            collections = commandLine.getOptionValues('c');
            commandLineCollections = true;
        } else {
            handler.logInfo("No collections given. Assuming 'collections' file inside item directory");
        }

        if (commandLine.hasOption('R')) {
            isResume = true;
            handler.logInfo("**Resume import** - attempting to import items not already imported");
        }

        if (commandLine.hasOption('q')) {
            isQuiet = true;
        }

        setZip();
    }

    @Override
    public void internalRun() throws Exception {
        if (help) {
            printHelp();
            return;
        }

        Date startTime = new Date();
        Context context = new Context(Context.Mode.BATCH_EDIT);

        setMapFile();

        validate(context);

        setEPerson(context);

        // check collection
        List<Collection> mycollections = null;
        // don't need to validate collections set if command is "delete"
        // also if no collections are given in the command line
        if (!"delete".equals(command) && commandLineCollections) {
            handler.logInfo("Destination collections:");

            mycollections = new ArrayList<>();

            // validate each collection arg to see if it's a real collection
            for (int i = 0; i < collections.length; i++) {
                Collection collection = null;
                if (collections[i] != null) {
                    // is the ID a handle?
                    if (collections[i].indexOf('/') != -1) {
                        // string has a / so it must be a handle - try and resolve
                        // it
                        collection = ((Collection) handleService
                            .resolveToObject(context, collections[i]));
                    } else {
                        // not a handle, try and treat it as an integer collection database ID
                        collection = collectionService.find(context, UUID.fromString(collections[i]));
                    }
                }

                // was the collection valid?
                if (collection == null
                        || collection.getType() != Constants.COLLECTION) {
                    throw new IllegalArgumentException("Cannot resolve "
                            + collections[i] + " to collection");
                }

                // add resolved collection to list
                mycollections.add(collection);

                // print progress info
                handler.logInfo((i == 0 ? "Owning " : "") + "Collection: " + collection.getName());
            }
        }
        // end validation

        // start
        ItemImportService itemImportService = ItemImportServiceFactory.getInstance()
                .getItemImportService();
        try {
            itemImportService.setTest(isTest);
            itemImportService.setResume(isResume);
            itemImportService.setUseWorkflow(useWorkflow);
            itemImportService.setUseWorkflowSendEmail(useWorkflowSendEmail);
            itemImportService.setQuiet(isQuiet);
            itemImportService.setHandler(handler);

            try {
                context.turnOffAuthorisationSystem();

                readZip(context, itemImportService);

                process(context, itemImportService, mycollections);

                // complete all transactions
                context.complete();
            } catch (Exception e) {
                context.abort();
                throw new Exception(
                    "Error committing changes to database: " + e.getMessage() + ", aborting most recent changes", e);
            }

            if (isTest) {
                handler.logInfo("***End of Test Run***");
            }
        } finally {
            // clean work dir
            if (zip) {
                FileUtils.deleteDirectory(new File(sourcedir));
                FileUtils.deleteDirectory(workDir);
            }

            Date endTime = new Date();
            handler.logInfo("Started: " + startTime.getTime());
            handler.logInfo("Ended: " + endTime.getTime());
            handler.logInfo(
                "Elapsed time: " + ((endTime.getTime() - startTime.getTime()) / 1000) + " secs (" + (endTime
                    .getTime() - startTime.getTime()) + " msecs)");
        }
    }

    /**
     * Validate the options
     * @param context
     */
    protected void validate(Context context) {
        if (command == null) {
            handler.logError("Must run with either add, replace, or remove (run with -h flag for details)");
            throw new UnsupportedOperationException("Must run with either add, replace, or remove");
        }

        // can only resume for adds
        if (isResume && !"add".equals(command)) {
            handler.logError("Resume option only works with the --add command (run with -h flag for details)");
            throw new UnsupportedOperationException("Resume option only works with the --add command");
        }

        if (isResume && StringUtils.isBlank(mapfile)) {
            handler.logError("The mapfile does not exist. ");
            throw new UnsupportedOperationException("The mapfile does not exist");
        }
    }

    /**
     * Process the import
     * @param context
     * @param itemImportService
     * @param collections
     * @throws Exception
     */
    protected void process(Context context, ItemImportService itemImportService,
            List<Collection> collections) throws Exception {
        readMapfile(context);

        if ("add".equals(command)) {
            itemImportService.addItems(context, collections, sourcedir, mapfile, template);
        } else if ("replace".equals(command)) {
            itemImportService.replaceItems(context, collections, sourcedir, mapfile, template);
        } else if ("delete".equals(command)) {
            itemImportService.deleteItems(context, mapfile);
        }

        // write input stream on handler
        File mapFile = new File(mapfile);
        try (InputStream mapfileInputStream = new FileInputStream(mapFile)) {
            handler.writeFilestream(context, MAPFILE_FILENAME, mapfileInputStream, MAPFILE_BITSTREAM_TYPE);
        } finally {
            mapFile.delete();
            workFile.delete();
        }
    }

    /**
     * Read the ZIP archive in SAF format
     * @param context
     * @param itemImportService
     * @throws Exception
     */
    protected void readZip(Context context, ItemImportService itemImportService) throws Exception {
        Optional<InputStream> optionalFileStream = handler.getFileStream(context, zipfilename);
        if (optionalFileStream.isPresent()) {
            workFile = new File(itemImportService.getTempWorkDir() + File.separator
                    + zipfilename + "-" + context.getCurrentUser().getID());
            FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
            workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR);
            sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
        } else {
            throw new IllegalArgumentException(
                "Error reading file, the file couldn't be found for filename: " + zipfilename);
        }
    }

    /**
     * Read the mapfile
     * @param context
     */
    protected void readMapfile(Context context) {
        if (isResume) {
            try {
                Optional<InputStream> optionalFileStream = handler.getFileStream(context, mapfile);
                if (optionalFileStream.isPresent()) {
                    File tempFile = File.createTempFile(mapfile, "temp");
                    tempFile.deleteOnExit();
                    FileUtils.copyInputStreamToFile(optionalFileStream.get(), tempFile);
                    mapfile = tempFile.getAbsolutePath();
                }
            } catch (IOException | AuthorizeException e) {
                throw new UnsupportedOperationException("The mapfile does not exist");
            }
        }
    }

    /**
     * Set the mapfile option
     * @throws IOException
     */
    protected void setMapFile() throws IOException {
        if (isResume && commandLine.hasOption('m')) {
            mapfile = commandLine.getOptionValue('m');
        } else {
            mapfile = Files.createTempFile(MAPFILE_FILENAME, "temp").toString();
        }
    }

    /**
     * Set the zip option
     */
    protected void setZip() {
        zip = true;
        zipfilename = commandLine.getOptionValue('z');
    }

    /**
     * Set the eperson in the context
     * @param context
     * @throws SQLException
     */
    protected void setEPerson(Context context) throws SQLException {
        EPerson myEPerson = epersonService.find(context, this.getEpersonIdentifier());

        // check eperson
        if (myEPerson == null) {
            handler.logError("EPerson cannot be found: " + this.getEpersonIdentifier());
            throw new UnsupportedOperationException("EPerson cannot be found: " + this.getEpersonIdentifier());
        }

        context.setCurrentUser(myEPerson);
    }
}
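For reference, the "add" branch that process(...) above dispatches to can be driven directly through the service facade. A sketch only: every call below appears in the class above, while the paths, the dry-run flag, and the empty collection list are hypothetical placeholders.

    // Sketch: the "add" path of process(...), driven directly.
    Context context = new Context(Context.Mode.BATCH_EDIT);
    ItemImportService importer = ItemImportServiceFactory.getInstance().getItemImportService();
    importer.setTest(true);                       // dry run, as with the -v flag
    List<Collection> targets = new ArrayList<>(); // resolved collections would go here
    importer.addItems(context, targets, "/tmp/saf-source", "/tmp/mapfile", false);
    context.complete();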
@@ -1,143 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.itemimport;

import java.io.File;
import java.sql.SQLException;
import java.util.List;
import java.util.UUID;

import org.apache.commons.lang3.StringUtils;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.content.Collection;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;

/**
 * CLI variant for the {@link ItemImport} class.
 * This was done to specify the specific behaviors for the CLI.
 *
 * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com)
 */
public class ItemImportCLI extends ItemImport {

    @Override
    protected void validate(Context context) {
        // can only resume for adds
        if (isResume && !"add".equals(command)) {
            handler.logError("Resume option only works with the --add command (run with -h flag for details)");
            throw new UnsupportedOperationException("Resume option only works with the --add command");
        }

        if (commandLine.hasOption('e')) {
            eperson = commandLine.getOptionValue('e');
        }

        // check eperson identifier (email or id)
        if (eperson == null) {
            handler.logError("An eperson to do the importing must be specified (run with -h flag for details)");
            throw new UnsupportedOperationException("An eperson to do the importing must be specified");
        }

        File myFile = null;
        try {
            myFile = new File(mapfile);
        } catch (Exception e) {
            throw new UnsupportedOperationException("The mapfile " + mapfile + " does not exist");
        }

        if (!isResume && "add".equals(command) && myFile.exists()) {
            handler.logError("The mapfile " + mapfile + " already exists. "
                    + "Either delete it or use --resume if attempting to resume an aborted import. "
                    + "(run with -h flag for details)");
            throw new UnsupportedOperationException("The mapfile " + mapfile + " already exists");
        }

        if (command == null) {
            handler.logError("Must run with either add, replace, or remove (run with -h flag for details)");
            throw new UnsupportedOperationException("Must run with either add, replace, or remove");
        } else if ("add".equals(command) || "replace".equals(command)) {
            if (sourcedir == null) {
                handler.logError("A source directory containing items must be set (run with -h flag for details)");
                throw new UnsupportedOperationException("A source directory containing items must be set");
            }

            if (mapfile == null) {
                handler.logError(
                    "A map file to hold importing results must be specified (run with -h flag for details)");
                throw new UnsupportedOperationException("A map file to hold importing results must be specified");
            }
        } else if ("delete".equals(command)) {
            if (mapfile == null) {
                handler.logError("A map file must be specified (run with -h flag for details)");
                throw new UnsupportedOperationException("A map file must be specified");
            }
        }
    }

    @Override
    protected void process(Context context, ItemImportService itemImportService,
            List<Collection> collections) throws Exception {
        if ("add".equals(command)) {
            itemImportService.addItems(context, collections, sourcedir, mapfile, template);
        } else if ("replace".equals(command)) {
            itemImportService.replaceItems(context, collections, sourcedir, mapfile, template);
        } else if ("delete".equals(command)) {
            itemImportService.deleteItems(context, mapfile);
        }
    }

    @Override
    protected void readZip(Context context, ItemImportService itemImportService) throws Exception {
        // If this is a zip archive, unzip it first
        if (zip) {
            workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
                    + File.separator + context.getCurrentUser().getID());
            sourcedir = itemImportService.unzip(
                new File(sourcedir + File.separator + zipfilename), workDir.getAbsolutePath());
        }
    }

    @Override
    protected void setMapFile() {
        if (commandLine.hasOption('m')) {
            mapfile = commandLine.getOptionValue('m');
        }
    }

    @Override
    protected void setZip() {
        if (commandLine.hasOption('s')) { // source
            sourcedir = commandLine.getOptionValue('s');
        }

        if (commandLine.hasOption('z')) {
            zip = true;
            zipfilename = commandLine.getOptionValue('z');
        }
    }

    @Override
    protected void setEPerson(Context context) throws SQLException {
        EPerson myEPerson = null;
        if (StringUtils.contains(eperson, '@')) {
            // @ sign, must be an email
            myEPerson = epersonService.findByEmail(context, eperson);
        } else {
            myEPerson = epersonService.find(context, UUID.fromString(eperson));
        }

        // check eperson
        if (myEPerson == null) {
            handler.logError("EPerson cannot be found: " + eperson + " (run with -h flag for details)");
            throw new UnsupportedOperationException("EPerson cannot be found: " + eperson);
        }

        context.setCurrentUser(myEPerson);
    }
}
@@ -1,74 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.itemimport;

import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.dspace.scripts.configuration.ScriptConfiguration;

/**
 * The {@link ScriptConfiguration} for the {@link ItemImportCLI} script
 *
 * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com)
 */
public class ItemImportCLIScriptConfiguration extends ItemImportScriptConfiguration<ItemImportCLI> {

    @Override
    public Options getOptions() {
        Options options = new Options();

        options.addOption(Option.builder("a").longOpt("add")
                .desc("add items to DSpace")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("r").longOpt("replace")
                .desc("replace items in mapfile")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("d").longOpt("delete")
                .desc("delete items listed in mapfile")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("s").longOpt("source")
                .desc("source of items (directory)")
                .hasArg().required(false).build());
        options.addOption(Option.builder("z").longOpt("zip")
                .desc("name of zip file")
                .hasArg().required(false).build());
        options.addOption(Option.builder("c").longOpt("collection")
                .desc("destination collection(s) Handle or database ID")
                .hasArg().required(false).build());
        options.addOption(Option.builder("m").longOpt("mapfile")
                .desc("mapfile items in mapfile")
                .hasArg().required().build());
        options.addOption(Option.builder("e").longOpt("eperson")
                .desc("email of eperson doing importing")
                .hasArg().required().build());
        options.addOption(Option.builder("w").longOpt("workflow")
                .desc("send submission through collection's workflow")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("n").longOpt("notify")
                .desc("if sending submissions through the workflow, send notification emails")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("v").longOpt("validate")
                .desc("test run - do not actually import items")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("p").longOpt("template")
                .desc("apply template")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("R").longOpt("resume")
                .desc("resume a failed import (add only)")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("q").longOpt("quiet")
                .desc("don't display metadata")
                .hasArg(false).required(false).build());

        options.addOption(Option.builder("h").longOpt("help")
                .desc("help")
                .hasArg(false).required(false).build());

        return options;
    }
}
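Aside: the Option.builder chain above is plain Apache commons-cli. A minimal, self-contained sketch of the same pattern, separating a flag option from a required argument option (the class name OptionsSketch and the two options chosen are illustrative, not part of DSpace):

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class OptionsSketch {
    public static void main(String[] args) throws ParseException {
        Options options = new Options();
        // flag option: takes no argument, optional
        options.addOption(Option.builder("a").longOpt("add")
                .desc("add items")
                .hasArg(false).required(false).build());
        // argument option: required, consumes one value
        options.addOption(Option.builder("m").longOpt("mapfile")
                .desc("map file of items")
                .hasArg().required().build());

        CommandLine line = new DefaultParser().parse(options, args);
        if (line.hasOption('a')) {
            System.out.println("adding items, mapfile = " + line.getOptionValue('m'));
        }
    }
}

Parsing fails fast with a ParseException when a required option such as -m is missing, which is why the configuration above can mark options required() instead of validating them by hand.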
@@ -0,0 +1,431 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.itemimport;

import java.io.File;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.UUID;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.app.itemimport.factory.ItemImportServiceFactory;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.content.Collection;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;

/**
 * Import items into DSpace. The conventional use is upload files by copying
 * them. DSpace writes the item's bitstreams into its assetstore. Metadata is
 * also loaded to the DSpace database.
 * <P>
 * A second use assumes the bitstream files already exist in a storage
 * resource accessible to DSpace. In this case the bitstreams are 'registered'.
 * That is, the metadata is loaded to the DSpace database and DSpace is given
 * the location of the file which is subsumed into DSpace.
 * <P>
 * The distinction is controlled by the format of lines in the 'contents' file.
 * See comments in processContentsFile() below.
 * <P>
 * Modified by David Little, UCSD Libraries 12/21/04 to
 * allow the registration of files (bitstreams) into DSpace.
 */
public class ItemImportCLITool {

    private static boolean template = false;

    private static final CollectionService collectionService = ContentServiceFactory.getInstance()
                                                                                    .getCollectionService();
    private static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService();
    private static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();

    /**
     * Default constructor
     */
    private ItemImportCLITool() { }

    public static void main(String[] argv) throws Exception {
        Date startTime = new Date();
        int status = 0;

        try {
            // create an options object and populate it
            CommandLineParser parser = new PosixParser();

            Options options = new Options();

            options.addOption("a", "add", false, "add items to DSpace");
            options.addOption("b", "add-bte", false, "add items to DSpace via Biblio-Transformation-Engine (BTE)");
            options.addOption("r", "replace", false, "replace items in mapfile");
            options.addOption("d", "delete", false,
                              "delete items listed in mapfile");
            options.addOption("i", "inputtype", true, "input type in case of BTE import");
            options.addOption("s", "source", true, "source of items (directory)");
            options.addOption("z", "zip", true, "name of zip file");
            options.addOption("c", "collection", true,
                              "destination collection(s) Handle or database ID");
            options.addOption("m", "mapfile", true, "mapfile items in mapfile");
            options.addOption("e", "eperson", true,
                              "email of eperson doing importing");
            options.addOption("w", "workflow", false,
                              "send submission through collection's workflow");
            options.addOption("n", "notify", false,
                              "if sending submissions through the workflow, send notification emails");
            options.addOption("t", "test", false,
                              "test run - do not actually import items");
            options.addOption("p", "template", false, "apply template");
            options.addOption("R", "resume", false,
                              "resume a failed import (add only)");
            options.addOption("q", "quiet", false, "don't display metadata");

            options.addOption("h", "help", false, "help");

            CommandLine line = parser.parse(options, argv);

            String command = null; // add replace remove, etc
            String bteInputType = null; //ris, endnote, tsv, csv, bibtex
            String sourcedir = null;
            String mapfile = null;
            String eperson = null; // db ID or email
            String[] collections = null; // db ID or handles
            boolean isTest = false;
            boolean isResume = false;
            boolean useWorkflow = false;
            boolean useWorkflowSendEmail = false;
            boolean isQuiet = false;

            if (line.hasOption('h')) {
                HelpFormatter myhelp = new HelpFormatter();
                myhelp.printHelp("ItemImport\n", options);
                System.out
                    .println("\nadding items: ItemImport -a -e eperson -c collection -s sourcedir -m mapfile");
                System.out
                    .println(
                        "\nadding items from zip file: ItemImport -a -e eperson -c collection -s sourcedir -z " +
                            "filename.zip -m mapfile");
                System.out
                    .println("replacing items: ItemImport -r -e eperson -c collection -s sourcedir -m mapfile");
                System.out
                    .println("deleting items: ItemImport -d -e eperson -m mapfile");
                System.out
                    .println(
                        "If multiple collections are specified, the first collection will be the one that owns the " +
                            "item.");

                System.exit(0);
            }

            if (line.hasOption('a')) {
                command = "add";
            }

            if (line.hasOption('r')) {
                command = "replace";
            }

            if (line.hasOption('d')) {
                command = "delete";
            }

            if (line.hasOption('b')) {
                command = "add-bte";
            }

            if (line.hasOption('i')) {
                bteInputType = line.getOptionValue('i');
            }

            if (line.hasOption('w')) {
                useWorkflow = true;
                if (line.hasOption('n')) {
                    useWorkflowSendEmail = true;
                }
            }

            if (line.hasOption('t')) {
                isTest = true;
                System.out.println("**Test Run** - not actually importing items.");
            }

            if (line.hasOption('p')) {
                template = true;
            }

            if (line.hasOption('s')) { // source
                sourcedir = line.getOptionValue('s');
            }

            if (line.hasOption('m')) { // mapfile
                mapfile = line.getOptionValue('m');
            }

            if (line.hasOption('e')) { // eperson
                eperson = line.getOptionValue('e');
            }

            if (line.hasOption('c')) { // collections
                collections = line.getOptionValues('c');
            }

            if (line.hasOption('R')) {
                isResume = true;
                System.out
                    .println("**Resume import** - attempting to import items not already imported");
            }

            if (line.hasOption('q')) {
                isQuiet = true;
            }

            boolean zip = false;
            String zipfilename = "";
            if (line.hasOption('z')) {
                zip = true;
                zipfilename = line.getOptionValue('z');
            }

            //By default assume collections will be given on the command line
            boolean commandLineCollections = true;
            // now validate
            // must have a command set
            if (command == null) {
                System.out
                    .println("Error - must run with either add, replace, or remove (run with -h flag for details)");
                System.exit(1);
            } else if ("add".equals(command) || "replace".equals(command)) {
                if (sourcedir == null) {
                    System.out
                        .println("Error - a source directory containing items must be set");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }

                if (mapfile == null) {
                    System.out
                        .println("Error - a map file to hold importing results must be specified");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }

                if (eperson == null) {
                    System.out
                        .println("Error - an eperson to do the importing must be specified");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }

                if (collections == null) {
                    System.out.println("No collections given. Assuming 'collections' file inside item directory");
                    commandLineCollections = false;
                }
            } else if ("add-bte".equals(command)) {
                //Source dir can be null, the user can specify the parameters for his loader in the Spring XML
                // configuration file

                if (mapfile == null) {
                    System.out
                        .println("Error - a map file to hold importing results must be specified");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }

                if (eperson == null) {
                    System.out
                        .println("Error - an eperson to do the importing must be specified");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }

                if (collections == null) {
                    System.out.println("No collections given. Assuming 'collections' file inside item directory");
                    commandLineCollections = false;
                }

                if (bteInputType == null) {
                    System.out
                        .println(
                            "Error - an input type (tsv, csv, ris, endnote, bibtex or any other type you have " +
                                "specified in BTE Spring XML configuration file) must be specified");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }
            } else if ("delete".equals(command)) {
                if (eperson == null) {
                    System.out
                        .println("Error - an eperson to do the importing must be specified");
                    System.exit(1);
                }

                if (mapfile == null) {
                    System.out.println("Error - a map file must be specified");
                    System.exit(1);
                }
            }

            // can only resume for adds
            if (isResume && !"add".equals(command) && !"add-bte".equals(command)) {
                System.out
                    .println("Error - resume option only works with the --add or the --add-bte commands");
                System.exit(1);
            }

            // do checks around mapfile - if mapfile exists and 'add' is selected,
            // resume must be chosen
            File myFile = new File(mapfile);

            if (!isResume && "add".equals(command) && myFile.exists()) {
                System.out.println("Error - the mapfile " + mapfile
                                       + " already exists.");
                System.out
                    .println("Either delete it or use --resume if attempting to resume an aborted import.");
                System.exit(1);
            }

            ItemImportService myloader = ItemImportServiceFactory.getInstance().getItemImportService();
            myloader.setTest(isTest);
            myloader.setResume(isResume);
            myloader.setUseWorkflow(useWorkflow);
            myloader.setUseWorkflowSendEmail(useWorkflowSendEmail);
            myloader.setQuiet(isQuiet);

            // create a context
            Context c = new Context(Context.Mode.BATCH_EDIT);

            // find the EPerson, assign to context
            EPerson myEPerson = null;

            if (eperson.indexOf('@') != -1) {
                // @ sign, must be an email
                myEPerson = epersonService.findByEmail(c, eperson);
            } else {
                myEPerson = epersonService.find(c, UUID.fromString(eperson));
            }

            if (myEPerson == null) {
                System.out.println("Error, eperson cannot be found: " + eperson);
                System.exit(1);
            }

            c.setCurrentUser(myEPerson);

            // find collections
            List<Collection> mycollections = null;

            // don't need to validate collections set if command is "delete"
            // also if no collections are given in the command line
            if (!"delete".equals(command) && commandLineCollections) {
                System.out.println("Destination collections:");

                mycollections = new ArrayList<>();

                // validate each collection arg to see if it's a real collection
                for (int i = 0; i < collections.length; i++) {
                    // is the ID a handle?
                    if (collections[i].indexOf('/') != -1) {
                        // string has a / so it must be a handle - try and resolve
                        // it
                        mycollections.add((Collection) handleService
                            .resolveToObject(c, collections[i]));

                        // resolved, now make sure it's a collection
                        if ((mycollections.get(i) == null)
                            || (mycollections.get(i).getType() != Constants.COLLECTION)) {
                            mycollections.set(i, null);
                        }
                    } else if (collections[i] != null) {
                        // not a handle, try and treat it as an integer collection database ID
                        mycollections.set(i, collectionService.find(c, UUID.fromString(collections[i])));
                    }

                    // was the collection valid?
                    if (mycollections.get(i) == null) {
                        throw new IllegalArgumentException("Cannot resolve "
                                                               + collections[i] + " to collection");
                    }

                    // print progress info
                    String owningPrefix = "";

                    if (i == 0) {
                        owningPrefix = "Owning ";
                    }

                    System.out.println(owningPrefix + " Collection: "
                                           + mycollections.get(i).getName());
                }
            } // end of validating collections

            try {
                // If this is a zip archive, unzip it first
                if (zip) {
                    sourcedir = myloader.unzip(sourcedir, zipfilename);
                }

                c.turnOffAuthorisationSystem();

                if ("add".equals(command)) {
                    myloader.addItems(c, mycollections, sourcedir, mapfile, template);
                } else if ("replace".equals(command)) {
                    myloader.replaceItems(c, mycollections, sourcedir, mapfile, template);
                } else if ("delete".equals(command)) {
                    myloader.deleteItems(c, mapfile);
                } else if ("add-bte".equals(command)) {
                    myloader.addBTEItems(c, mycollections, sourcedir, mapfile, template, bteInputType, null);
                }

                // complete all transactions
                c.complete();
            } catch (Exception e) {
                c.abort();
                e.printStackTrace();
                System.out.println(e);
                status = 1;
            }

            // Delete the unzipped file
            try {
                if (zip) {
                    System.gc();
                    System.out.println(
                        "Deleting temporary zip directory: " + myloader.getTempWorkDirFile().getAbsolutePath());
                    myloader.cleanupZipTemp();
                }
            } catch (Exception ex) {
                System.out.println("Unable to delete temporary zip archive location: " + myloader.getTempWorkDirFile()
                                                                                                 .getAbsolutePath());
            }

            if (isTest) {
                System.out.println("***End of Test Run***");
            }
        } finally {
            Date endTime = new Date();
            System.out.println("Started: " + startTime.getTime());
            System.out.println("Ended: " + endTime.getTime());
            System.out.println(
                "Elapsed time: " + ((endTime.getTime() - startTime.getTime()) / 1000) + " secs (" + (endTime
                    .getTime() - startTime.getTime()) + " msecs)");
        }

        System.exit(status);
    }
}
@@ -1,100 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.itemimport;

import java.io.InputStream;
import java.sql.SQLException;

import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * The {@link ScriptConfiguration} for the {@link ItemImport} script
 *
 * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com)
 */
public class ItemImportScriptConfiguration<T extends ItemImport> extends ScriptConfiguration<T> {

    @Autowired
    private AuthorizeService authorizeService;

    private Class<T> dspaceRunnableClass;

    @Override
    public Class<T> getDspaceRunnableClass() {
        return dspaceRunnableClass;
    }

    @Override
    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
        this.dspaceRunnableClass = dspaceRunnableClass;
    }

    @Override
    public boolean isAllowedToExecute(final Context context) {
        try {
            return authorizeService.isAdmin(context);
        } catch (SQLException e) {
            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
        }
    }

    @Override
    public Options getOptions() {
        Options options = new Options();

        options.addOption(Option.builder("a").longOpt("add")
                .desc("add items to DSpace")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("r").longOpt("replace")
                .desc("replace items in mapfile")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("d").longOpt("delete")
                .desc("delete items listed in mapfile")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("z").longOpt("zip")
                .desc("name of zip file")
                .type(InputStream.class)
                .hasArg().required().build());
        options.addOption(Option.builder("c").longOpt("collection")
                .desc("destination collection(s) Handle or database ID")
                .hasArg().required(false).build());
        options.addOption(Option.builder("m").longOpt("mapfile")
                .desc("mapfile items in mapfile")
                .type(InputStream.class)
                .hasArg().required(false).build());
        options.addOption(Option.builder("w").longOpt("workflow")
                .desc("send submission through collection's workflow")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("n").longOpt("notify")
                .desc("if sending submissions through the workflow, send notification emails")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("v").longOpt("validate")
                .desc("test run - do not actually import items")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("p").longOpt("template")
                .desc("apply template")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("R").longOpt("resume")
                .desc("resume a failed import (add only)")
                .hasArg(false).required(false).build());
        options.addOption(Option.builder("q").longOpt("quiet")
                .desc("don't display metadata")
                .hasArg(false).required(false).build());

        options.addOption(Option.builder("h").longOpt("help")
                .desc("help")
                .hasArg(false).required(false).build());

        return options;
    }
}
File diff suppressed because it is too large
@@ -16,7 +16,6 @@ import org.dspace.app.itemimport.BatchUpload;
import org.dspace.content.Collection;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.scripts.handler.DSpaceRunnableHandler;

/**
 * Import items into DSpace. The conventional use is upload files by copying
@@ -106,7 +105,7 @@ public interface ItemImportService {
                      String inputType, Context context, boolean template) throws Exception;

    /**
     * If a batch import is done in a new thread we are unable to communicate
     * Since the BTE batch import is done in a new thread we are unable to communicate
     * with calling method about success or failure. We accomplish this
     * communication with email instead. Send a success email once the batch
     * import is complete
@@ -120,7 +119,7 @@ public interface ItemImportService {
                          String fileName) throws MessagingException;

    /**
     * If a batch import is done in a new thread we are unable to communicate
     * Since the BTE batch import is done in a new thread we are unable to communicate
     * with calling method about success or failure. We accomplish this
     * communication with email instead. Send an error email if the batch
     * import fails
@@ -184,6 +183,21 @@ public interface ItemImportService {
     */
    public void deleteItems(Context c, String mapfile) throws Exception;

    /**
     * Add items
     *
     * @param c DSpace Context
     * @param mycollections List of Collections
     * @param sourcedir source directory
     * @param mapfile map file
     * @param template whether to use template item
     * @param bteInputType The input type of the data (bibtex, csv, etc.), in case of local file
     * @param workingDir working directory
     * @throws Exception if error
     */
    public void addBTEItems(Context c, List<Collection> mycollections, String sourcedir, String mapfile,
                            boolean template, String bteInputType, String workingDir) throws Exception;

    /**
     * Get temporary work directory
     *
@@ -236,10 +250,4 @@ public interface ItemImportService {
     * @param isQuiet true or false
     */
    public void setQuiet(boolean isQuiet);

    /**
     * Set the DSpace Runnable Handler
     * @param handler
     */
    public void setHandler(DSpaceRunnableHandler handler);
}
@@ -11,8 +11,6 @@ import java.io.UnsupportedEncodingException;
import java.sql.SQLException;
import java.util.List;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.app.util.Util;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
@@ -36,9 +34,8 @@ public class ItemMarkingAvailabilityBitstreamStrategy implements ItemMarkingExtr
    @Autowired(required = true)
    protected ItemService itemService;

    private static final Logger LOG = LogManager.getLogger();

    public ItemMarkingAvailabilityBitstreamStrategy() {

    }

    @Override
@@ -46,14 +43,14 @@ public class ItemMarkingAvailabilityBitstreamStrategy implements ItemMarkingExtr
        throws SQLException {

        List<Bundle> bundles = itemService.getBundles(item, "ORIGINAL");
        if (bundles.isEmpty()) {
        if (bundles.size() == 0) {
            ItemMarkingInfo markInfo = new ItemMarkingInfo();
            markInfo.setImageName(nonAvailableImageName);

            return markInfo;
        } else {
            Bundle originalBundle = bundles.iterator().next();
            if (originalBundle.getBitstreams().isEmpty()) {
            if (originalBundle.getBitstreams().size() == 0) {
                ItemMarkingInfo markInfo = new ItemMarkingInfo();
                markInfo.setImageName(nonAvailableImageName);

@@ -75,7 +72,8 @@ public class ItemMarkingAvailabilityBitstreamStrategy implements ItemMarkingExtr
            try {
                bsLink = bsLink + Util.encodeBitstreamName(bitstream.getName(), Constants.DEFAULT_ENCODING);
            } catch (UnsupportedEncodingException e) {
                LOG.warn("DSpace uses an unsupported encoding", e);

                e.printStackTrace();
            }

            signInfo.setLink(bsLink);
@@ -7,7 +7,6 @@
 */
package org.dspace.app.itemupdate;

import java.lang.reflect.InvocationTargetException;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
@@ -21,25 +20,22 @@ import java.util.Map;
public class ActionManager implements Iterable<UpdateAction> {

    protected Map<Class<? extends UpdateAction>, UpdateAction> registry
            = new LinkedHashMap<>();
            = new LinkedHashMap<Class<? extends UpdateAction>, UpdateAction>();

    /**
     * Get update action.
     * Get update action
     *
     * @param actionClass UpdateAction class
     * @return instantiation of UpdateAction class
     * @throws InstantiationException if instantiation error
     * @throws IllegalAccessException if illegal access error
     * @throws NoSuchMethodException passed through.
     * @throws InvocationTargetException passed through.
     */
    public UpdateAction getUpdateAction(Class<? extends UpdateAction> actionClass)
        throws InstantiationException, IllegalAccessException,
               NoSuchMethodException, IllegalArgumentException, InvocationTargetException {
        throws InstantiationException, IllegalAccessException {
        UpdateAction action = registry.get(actionClass);

        if (action == null) {
            action = actionClass.getDeclaredConstructor().newInstance();
            action = actionClass.newInstance();
            registry.put(actionClass, action);
        }

@@ -62,8 +58,7 @@ public class ActionManager implements Iterable<UpdateAction> {
    @Override
    public Iterator<UpdateAction> iterator() {
        return new Iterator<UpdateAction>() {
            private final Iterator<Class<? extends UpdateAction>> itr
                    = registry.keySet().iterator();
            private Iterator<Class<? extends UpdateAction>> itr = registry.keySet().iterator();

            @Override
            public boolean hasNext() {
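The hunk above swaps the deprecated Class.newInstance() for getDeclaredConstructor().newInstance(). A minimal, self-contained sketch of the same lazy one-instance-per-class registry, generalized over a type parameter T in place of UpdateAction (InstanceRegistry is a hypothetical name, not DSpace code):

import java.util.LinkedHashMap;
import java.util.Map;

class InstanceRegistry<T> {
    private final Map<Class<? extends T>, T> registry = new LinkedHashMap<>();

    // Instantiate at most one instance per class, on first request.
    T get(Class<? extends T> type) throws ReflectiveOperationException {
        T instance = registry.get(type);
        if (instance == null) {
            // getDeclaredConstructor().newInstance() replaces Class.newInstance(),
            // which was deprecated because it rethrows checked constructor
            // exceptions without declaring them.
            instance = type.getDeclaredConstructor().newInstance();
            registry.put(type, instance);
        }
        return instance;
    }
}

The LinkedHashMap preserves registration order, which matters here because iteration order determines the order in which update actions run.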
@@ -77,7 +77,7 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
        ItemUpdate.pr("Contents bitstream count: " + contents.size());

        String[] files = dir.list(ItemUpdate.fileFilter);
        List<String> fileList = new ArrayList<>();
        List<String> fileList = new ArrayList<String>();
        for (String filename : files) {
            fileList.add(filename);
            ItemUpdate.pr("file: " + filename);
@@ -134,6 +134,9 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
        ItemUpdate.pr("contents entry for bitstream: " + ce.toString());
        File f = new File(dir, ce.filename);

        // get an input stream
        BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f));

        Bitstream bs = null;
        String newBundleName = ce.bundlename;

@@ -170,9 +173,7 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
            targetBundle = bundles.iterator().next();
        }

        try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f));) {
            bs = bitstreamService.create(context, targetBundle, bis);
        }
        bs = bitstreamService.create(context, targetBundle, bis);
        bs.setName(context, ce.filename);

        // Identify the format
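The hunk above moves stream creation into a try-with-resources block, so the file handle is released even when bitstream creation fails. A minimal standalone sketch of the idiom (StreamSketch is a hypothetical name):

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class StreamSketch {
    public static void main(String[] args) throws IOException {
        // The stream is closed automatically when the block exits,
        // normally or via an exception; the earlier pattern leaked
        // the open stream whenever the body threw.
        try (InputStream bis = new BufferedInputStream(new FileInputStream(args[0]))) {
            System.out.println("first byte: " + bis.read());
        }
    }
}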
@@ -105,7 +105,6 @@ public class ContentsEntry {
        return new ContentsEntry(arp[0], arp[1], actionId, groupName, arp[3]);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder(filename);
        if (bundlename != null) {
@@ -120,7 +120,6 @@ class DtoMetadata {
        return true;
    }

    @Override
    public String toString() {
        String s = "\tSchema: " + schema + " Element: " + element;
        if (qualifier != null) {
@@ -17,7 +17,6 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
@@ -31,7 +30,7 @@ import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;

import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.dspace.app.util.LocalSchemaFilenameFilter;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
@@ -48,7 +47,7 @@ import org.w3c.dom.Document;
 * Encapsulates the Item in the context of the DSpace Archive Format
 */
public class ItemArchive {
    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemArchive.class);
    private static final Logger log = Logger.getLogger(ItemArchive.class);

    public static final String DUBLIN_CORE_XML = "dublin_core.xml";

@@ -56,7 +55,7 @@ public class ItemArchive {
    protected Transformer transformer = null;

    protected List<DtoMetadata> dtomList = null;
    protected List<DtoMetadata> undoDtomList = new ArrayList<>();
    protected List<DtoMetadata> undoDtomList = new ArrayList<DtoMetadata>();

    protected List<UUID> undoAddContents = new ArrayList<>(); // for undo of add

@@ -326,7 +325,7 @@ public class ItemArchive {
        PrintWriter pw = null;
        try {
            File f = new File(dir, ItemUpdate.DELETE_CONTENTS_FILE);
            pw = new PrintWriter(new BufferedWriter(new FileWriter(f, StandardCharsets.UTF_8)));
            pw = new PrintWriter(new BufferedWriter(new FileWriter(f)));
            for (UUID i : undoAddContents) {
                pw.println(i);
            }
@@ -24,49 +24,43 @@ import java.util.UUID;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;

/**
 * Provides some batch editing capabilities for items in DSpace.
 * <ul>
 * <li>Metadata fields - Add, Delete</li>
 * <li>Bitstreams - Add, Delete</li>
 * </ul>
 * Provides some batch editing capabilities for items in DSpace:
 * Metadata fields - Add, Delete
 * Bitstreams - Add, Delete
 *
 * <p>
 * The design has been for compatibility with
 * {@link org.dspace.app.itemimport.service.ItemImportService}
 * The design has been for compatibility with ItemImporter
 * in the use of the DSpace archive format which is used to
 * specify changes on a per item basis. The directory names
 * to correspond to each item are arbitrary and will only be
 * used for logging purposes. The reference to the item is
 * from a required {@code dc.identifier} with the item handle to be
 * included in the {@code dublin_core.xml} (or similar metadata) file.
 * from a required dc.identifier with the item handle to be
 * included in the dublin_core.xml (or similar metadata) file.
 *
 * <p>
 * Any combination of these actions is permitted in a single run of this class.
 * Any combination of these actions is permitted in a single run of this class
 * The order of actions is important when used in combination.
 * It is the responsibility of the calling class (here, {@code ItemUpdate})
 * to register {@link UpdateAction} classes in the order which they are
 * It is the responsibility of the calling class (here, ItemUpdate)
 * to register UpdateAction classes in the order to which they are
 * to be performed.
 *
 * <p>
 * It is unfortunate that so much code needs to be borrowed from
 * {@link org.dspace.app.itemimport.service.ItemImportService} as it is not
 * reusable in private methods, etc. Some of this has been placed into the
 * {@link MetadataUtilities} class for possible reuse elsewhere.
 *
 * It is unfortunate that so much code needs to be borrowed
 * from ItemImport as it is not reusable in private methods, etc.
 * Some of this has been placed into the MetadataUtilities class
 * for possible reuse elsewhere.
 *
 * @author W. Hays based on a conceptual design by R. Rodgers
 */
@@ -78,13 +72,12 @@ public class ItemUpdate {
    public static final String DELETE_CONTENTS_FILE = "delete_contents";

    public static String HANDLE_PREFIX = null;
    public static final Map<String, String> filterAliases = new HashMap<>();
    public static final Map<String, String> filterAliases = new HashMap<String, String>();

    public static boolean verbose = false;

    protected static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService();
    protected static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
    protected static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();

    static {
        filterAliases.put("ORIGINAL", "org.dspace.app.itemupdate.OriginalBitstreamFilter");
@@ -114,7 +107,7 @@ public class ItemUpdate {

    // instance variables
    protected ActionManager actionMgr = new ActionManager();
    protected List<String> undoActionList = new ArrayList<>();
    protected List<String> undoActionList = new ArrayList<String>();
    protected String eperson;

    /**
@@ -122,7 +115,7 @@ public class ItemUpdate {
     */
    public static void main(String[] argv) {
        // create an options object and populate it
        CommandLineParser parser = new DefaultParser();
        CommandLineParser parser = new PosixParser();

        Options options = new Options();

@@ -280,8 +273,7 @@ public class ItemUpdate {
                Class<?> cfilter = Class.forName(filterClassname);
                pr("BitstreamFilter class to instantiate: " + cfilter.toString());

                filter = (BitstreamFilter) cfilter.getDeclaredConstructor()
                    .newInstance(); //unfortunate cast, an erasure consequence
                filter = (BitstreamFilter) cfilter.newInstance(); //unfortunate cast, an erasure consequence
            } catch (Exception e) {
                pr("Error: Failure instantiating bitstream filter class: " + filterClassname);
                System.exit(1);
@@ -338,7 +330,10 @@ public class ItemUpdate {
            iu.setEPerson(context, iu.eperson);
            context.turnOffAuthorisationSystem();

            HANDLE_PREFIX = handleService.getCanonicalPrefix();
            HANDLE_PREFIX = ConfigurationManager.getProperty("handle.canonical.prefix");
            if (HANDLE_PREFIX == null || HANDLE_PREFIX.length() == 0) {
                HANDLE_PREFIX = "http://hdl.handle.net/";
            }

            iu.processArchive(context, sourcedir, itemField, metadataIndexName, alterProvenance, isTest);

@@ -380,7 +375,7 @@ public class ItemUpdate {
        // open and process the source directory
        File sourceDir = new File(sourceDirPath);

        if (!sourceDir.exists() || !sourceDir.isDirectory()) {
        if ((sourceDir == null) || !sourceDir.exists() || !sourceDir.isDirectory()) {
            pr("Error, cannot open archive source directory " + sourceDirPath);
            throw new Exception("error with archive source directory " + sourceDirPath);
        }
@@ -27,21 +27,18 @@ import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
@@ -172,30 +169,33 @@ public class MetadataUtilities {
     * @param docBuilder DocumentBuilder
     * @param is - InputStream of dublin_core.xml
     * @return list of DtoMetadata representing the metadata fields relating to an Item
     * @throws SQLException if database error
     * @throws IOException if IO error
     * @throws ParserConfigurationException if parser config error
     * @throws SAXException if XML error
     * @throws TransformerException if transformer error
     * @throws AuthorizeException if authorization error
     */
    public static List<DtoMetadata> loadDublinCore(DocumentBuilder docBuilder, InputStream is)
        throws IOException, XPathExpressionException, SAXException {
        throws SQLException, IOException, ParserConfigurationException,
        SAXException, TransformerException, AuthorizeException {
        Document document = docBuilder.parse(is);

        List<DtoMetadata> dtomList = new ArrayList<DtoMetadata>();

        // Get the schema, for backward compatibility we will default to the
        // dublin core schema if the schema name is not available in the import file
        String schema;
        XPath xPath = XPathFactory.newInstance().newXPath();
        NodeList metadata = (NodeList) xPath.compile("/dublin_core").evaluate(document, XPathConstants.NODESET);
        String schema = null;
        NodeList metadata = XPathAPI.selectNodeList(document, "/dublin_core");
        Node schemaAttr = metadata.item(0).getAttributes().getNamedItem("schema");
        if (schemaAttr == null) {
            schema = MetadataSchemaEnum.DC.getName();
            schema = MetadataSchema.DC_SCHEMA;
        } else {
            schema = schemaAttr.getNodeValue();
        }

        // Get the nodes corresponding to formats
        NodeList dcNodes = (NodeList) xPath.compile("/dublin_core/dcvalue").evaluate(document, XPathConstants.NODESET);
        NodeList dcNodes = XPathAPI.selectNodeList(document, "/dublin_core/dcvalue");

        for (int i = 0; i < dcNodes.getLength(); i++) {
            Node n = dcNodes.item(i);
@@ -225,9 +225,7 @@ public class MetadataUtilities {
            if (language == null) {
                language = "en";
            } else if ("".equals(language)) {
                language = DSpaceServicesFactory.getInstance()
                    .getConfigurationService()
                    .getProperty("default.language");
                language = ConfigurationManager.getProperty("default.language");
            }

            DtoMetadata dtom = DtoMetadata.create(schema, element, qualifier, language, value);
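The hunk above migrates from the Xalan-internal XPathAPI.selectNodeList() to the standard javax.xml.xpath API. A minimal sketch of the replacement pattern against a dublin_core-style document (XPathSketch and the inline XML are illustrative, not DSpace code):

import java.io.StringReader;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Document;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

public class XPathSketch {
    public static void main(String[] args) throws Exception {
        String xml = "<dublin_core schema=\"dc\">"
            + "<dcvalue element=\"title\">A title</dcvalue></dublin_core>";
        Document document = DocumentBuilderFactory.newInstance()
            .newDocumentBuilder()
            .parse(new InputSource(new StringReader(xml)));

        // compile() + evaluate() with NODESET replaces XPathAPI.selectNodeList()
        XPath xPath = XPathFactory.newInstance().newXPath();
        NodeList dcNodes = (NodeList) xPath.compile("/dublin_core/dcvalue")
            .evaluate(document, XPathConstants.NODESET);
        System.out.println("dcvalue nodes: " + dcNodes.getLength()); // prints 1
    }
}

Using the JDK API drops the direct dependency on Xalan internals, which is also why the method's throws clause above shrinks to IOException, XPathExpressionException, and SAXException.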
@@ -16,7 +16,7 @@ import java.io.StreamTokenizer;
import java.util.ArrayList;
import java.util.List;

import org.jdom2.Document;
import org.jdom.Document;

/**
 * @author mwood
@@ -10,28 +10,15 @@ package org.dspace.app.launcher;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.Method;
import java.sql.SQLException;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.TreeMap;

import org.apache.commons.cli.ParseException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.core.Context;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.dspace.scripts.factory.ScriptServiceFactory;
import org.dspace.scripts.handler.DSpaceRunnableHandler;
import org.dspace.scripts.handler.impl.CommandLineDSpaceRunnableHandler;
import org.dspace.scripts.service.ScriptService;
import org.dspace.servicemanager.DSpaceKernelImpl;
import org.dspace.servicemanager.DSpaceKernelInit;
import org.dspace.services.RequestService;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.input.SAXBuilder;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.input.SAXBuilder;

/**
 * A DSpace script launcher.
@@ -40,9 +27,6 @@ import org.jdom2.input.SAXBuilder;
 * @author Mark Diggory
 */
public class ScriptLauncher {

    private static final Logger log = LogManager.getLogger();

    /**
     * The service manager kernel
     */
@@ -51,8 +35,7 @@ public class ScriptLauncher {
    /**
     * Default constructor
     */
    private ScriptLauncher() {
    }
    private ScriptLauncher() { }

    /**
     * Execute the DSpace script launcher
@@ -62,7 +45,7 @@ public class ScriptLauncher {
     * @throws FileNotFoundException if file doesn't exist
     */
    public static void main(String[] args)
        throws FileNotFoundException, IOException, IllegalAccessException, InstantiationException {
        throws FileNotFoundException, IOException {
        // Initialise the service manager kernel
        try {
            kernelImpl = DSpaceKernelInit.getKernel(null);
@@ -93,9 +76,8 @@ public class ScriptLauncher {
        }

        // Look up command in the configuration, and execute.

        CommandLineDSpaceRunnableHandler commandLineDSpaceRunnableHandler = new CommandLineDSpaceRunnableHandler();
        int status = handleScript(args, commandConfigs, commandLineDSpaceRunnableHandler, kernelImpl);
        int status;
        status = runOneCommand(commandConfigs, args);

        // Destroy the service kernel if it is still alive
        if (kernelImpl != null) {
@@ -104,55 +86,6 @@ public class ScriptLauncher {
        }

        System.exit(status);

    }

    /**
     * This method will take the arguments from a commandline input and it'll find the script that the first argument
     * refers to and it'll execute this script.
     * It can return a 1 or a 0 depending on whether the script failed or passed respectively
     * @param args The arguments for the script and the script as first one in the array
     * @param commandConfigs The Document
     * @param dSpaceRunnableHandler The DSpaceRunnableHandler for this execution
     * @param kernelImpl The relevant DSpaceKernelImpl
     * @return A 1 or 0 depending on whether the script failed or passed respectively
     */
    public static int handleScript(String[] args, Document commandConfigs,
                                   DSpaceRunnableHandler dSpaceRunnableHandler,
                                   DSpaceKernelImpl kernelImpl) throws InstantiationException, IllegalAccessException {
        int status;
        ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService();
        ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(args[0]);
        DSpaceRunnable script = null;
        if (scriptConfiguration != null) {
            script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration);
        }
        if (script != null) {
            status = executeScript(args, dSpaceRunnableHandler, script);
        } else {
            status = runOneCommand(commandConfigs, args, kernelImpl);
        }
        return status;
    }

    /**
     * This method will simply execute the script
     * @param args The arguments of the script with the script name as first place in the array
     * @param dSpaceRunnableHandler The relevant DSpaceRunnableHandler
     * @param script The script to be executed
     * @return A 1 or 0 depending on whether the script failed or passed respectively
     */
    private static int executeScript(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler,
                                     DSpaceRunnable script) {
        try {
            script.initialize(args, dSpaceRunnableHandler, null);
            script.run();
            return 0;
        } catch (ParseException e) {
            script.printHelp();
            e.printStackTrace();
            return 1;
        }
    }

    protected static int runOneCommand(Document commandConfigs, String[] args) {
@@ -165,7 +98,7 @@ public class ScriptLauncher {
     * @param commandConfigs Document
     * @param args the command line arguments given
     */
    protected static int runOneCommand(Document commandConfigs, String[] args, DSpaceKernelImpl kernelImpl) {
    public static int runOneCommand(Document commandConfigs, String[] args, DSpaceKernelImpl kernelImpl) {
        String request = args[0];
        Element root = commandConfigs.getRootElement();
        List<Element> commands = root.getChildren("command");
@@ -322,53 +255,11 @@ public class ScriptLauncher {
    }

    /**
     * Display the commands that are defined in launcher.xml and/or the script service.
     * Display the commands that the current launcher config file knows about
     *
     * @param commandConfigs configs as Document
     */
    private static void display(Document commandConfigs) {
        // usage
        System.out.println("Usage: dspace [command-name] {parameters}");

        // commands from launcher.xml
        Collection<Element> launcherCommands = getLauncherCommands(commandConfigs);
        if (launcherCommands.size() > 0) {
            System.out.println("\nCommands from launcher.xml");
            for (Element command : launcherCommands) {
                displayCommand(
                    command.getChild("name").getValue(),
                    command.getChild("description").getValue()
                );
            }
        }

        // commands from script service
        Collection<ScriptConfiguration> serviceCommands = getServiceCommands();
        if (serviceCommands.size() > 0) {
            System.out.println("\nCommands from script service");
            for (ScriptConfiguration command : serviceCommands) {
                displayCommand(
                    command.getName(),
                    command.getDescription()
                );
            }
        }
    }

    /**
     * Display a single command using a fixed format. Used by {@link #display}.
     * @param name the name that can be used to invoke the command
     * @param description the description of the command
     */
    private static void displayCommand(String name, String description) {
        System.out.format(" - %s: %s\n", name, description);
    }

    /**
     * Get a sorted collection of the commands that are specified in launcher.xml. Used by {@link #display}.
     * @param commandConfigs the contexts of launcher.xml
     * @return sorted collection of commands
     */
    private static Collection<Element> getLauncherCommands(Document commandConfigs) {
        // List all command elements
        List<Element> commands = commandConfigs.getRootElement().getChildren("command");

@@ -380,32 +271,11 @@ public class ScriptLauncher {
            sortedCommands.put(command.getChild("name").getValue(), command);
        }

        return sortedCommands.values();
    }

    /**
     * Get a sorted collection of the commands that are defined as beans. Used by {@link #display}.
     * @return sorted collection of commands
     */
    private static Collection<ScriptConfiguration> getServiceCommands() {
        ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService();

        Context throwAwayContext = new Context();

        throwAwayContext.turnOffAuthorisationSystem();
        List<ScriptConfiguration> scriptConfigurations = scriptService.getScriptConfigurations(throwAwayContext);
        throwAwayContext.restoreAuthSystemState();

        try {
            throwAwayContext.complete();
        } catch (SQLException exception) {
            exception.printStackTrace();
            throwAwayContext.abort();
        // Display the sorted list
        System.out.println("Usage: dspace [command-name] {parameters}");
        for (Element command : sortedCommands.values()) {
            System.out.println(" - " + command.getChild("name").getValue() +
                                   ": " + command.getChild("description").getValue());
        }

        scriptConfigurations.sort(Comparator.comparing(ScriptConfiguration::getName));

        return scriptConfigurations;
    }

}
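ScriptLauncher reads its command registry from launcher.xml via JDOM. A minimal sketch of the jdom2 calls used above (getRootElement, getChildren, getChild(...).getValue()), assuming a launcher.xml file in the working directory with <command><name/><description/></command> entries (LauncherConfigSketch is a hypothetical name):

import java.io.File;
import java.util.List;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.input.SAXBuilder;

public class LauncherConfigSketch {
    public static void main(String[] args) throws Exception {
        // Parse the config file and list each <command> entry
        Document commandConfigs = new SAXBuilder().build(new File("launcher.xml"));
        Element root = commandConfigs.getRootElement();
        List<Element> commands = root.getChildren("command");
        for (Element command : commands) {
            System.out.println(command.getChild("name").getValue()
                + ": " + command.getChild("description").getValue());
        }
    }
}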
@@ -21,10 +21,10 @@ import java.awt.image.BufferedImage;
 */

public class Brand {
    private final int brandWidth;
    private final int brandHeight;
    private final Font font;
    private final int xOffset;
    private int brandWidth;
    private int brandHeight;
    private Font font;
    private int xOffset;

    /**
     * Constructor to set up footer image attributes.
@@ -92,7 +92,7 @@ public class Brand {
     * do the text placements and preparatory work for the brand image generation
     *
     * @param brandImage a BufferedImage object where the image is created
     * @param brandText an Identifier object describing what text is to be placed in what
     * @param identifier and Identifier object describing what text is to be placed in what
     * position within the brand
     */
    private void drawImage(BufferedImage brandImage,
@@ -39,7 +39,7 @@ class BrandText {
     * its location within a rectangular area.
     *
     * @param location one of the class location constants e.g. <code>Identifier.BL</code>
     * @param text text associated with the location
     * @param the text associated with the location
     */
    public BrandText(String location, String text) {
        this.location = location;
@@ -12,8 +12,7 @@ import java.io.InputStream;
import javax.imageio.ImageIO;

import org.dspace.content.Item;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.core.ConfigurationManager;

/**
 * Filter image bitstreams, scaling the image to be within the bounds of
@@ -67,19 +66,17 @@ public class BrandedPreviewJPEGFilter extends MediaFilter {
        BufferedImage buf = ImageIO.read(source);

        // get config params
        ConfigurationService configurationService
            = DSpaceServicesFactory.getInstance().getConfigurationService();
        float xmax = (float) configurationService
        float xmax = (float) ConfigurationManager
            .getIntProperty("webui.preview.maxwidth");
        float ymax = (float) configurationService
        float ymax = (float) ConfigurationManager
            .getIntProperty("webui.preview.maxheight");
        boolean blurring = (boolean) configurationService
        boolean blurring = (boolean) ConfigurationManager
            .getBooleanProperty("webui.preview.blurring");
        boolean hqscaling = (boolean) configurationService
        boolean hqscaling = (boolean) ConfigurationManager
            .getBooleanProperty("webui.preview.hqscaling");
        int brandHeight = configurationService.getIntProperty("webui.preview.brand.height");
        String brandFont = configurationService.getProperty("webui.preview.brand.font");
        int brandFontPoint = configurationService.getIntProperty("webui.preview.brand.fontpoint");
        int brandHeight = ConfigurationManager.getIntProperty("webui.preview.brand.height");
        String brandFont = ConfigurationManager.getProperty("webui.preview.brand.font");
        int brandFontPoint = ConfigurationManager.getIntProperty("webui.preview.brand.fontpoint");

        JPEGFilter jpegFilter = new JPEGFilter();
        return jpegFilter
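The hunk above replaces static ConfigurationManager lookups with the ConfigurationService bean, using only calls that appear in the diff itself (getProperty, getIntProperty, getBooleanProperty). A minimal sketch of the typed lookups, assuming it runs inside an initialized DSpace kernel so the service factory can resolve the bean (PreviewConfigSketch is a hypothetical name):

import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;

public class PreviewConfigSketch {
    public static void main(String[] args) {
        // Resolve the ConfigurationService singleton, then read typed properties
        ConfigurationService configurationService
            = DSpaceServicesFactory.getInstance().getConfigurationService();
        int maxWidth = configurationService.getIntProperty("webui.preview.maxwidth");
        boolean blurring = configurationService.getBooleanProperty("webui.preview.blurring");
        String brandFont = configurationService.getProperty("webui.preview.brand.font");
        System.out.println(maxWidth + " " + blurring + " " + brandFont);
    }
}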
@@ -0,0 +1,99 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.mediafilter;

import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.IOUtils;
import org.apache.log4j.Logger;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.hssf.extractor.ExcelExtractor;
import org.apache.poi.xssf.extractor.XSSFExcelExtractor;
import org.dspace.content.Item;

/*
 * ExcelFilter
 *
 * Entries you must add to dspace.cfg:
 *
 * filter.plugins = blah, \
 * Excel Text Extractor
 *
 * plugin.named.org.dspace.app.mediafilter.FormatFilter = \
 * blah = blah, \
 * org.dspace.app.mediafilter.ExcelFilter = Excel Text Extractor
 *
 * #Configure each filter's input Formats
 * filter.org.dspace.app.mediafilter.ExcelFilter.inputFormats = Microsoft Excel, Microsoft Excel XML
 *
 */
public class ExcelFilter extends MediaFilter {

    private static Logger log = Logger.getLogger(ExcelFilter.class);

    public String getFilteredName(String oldFilename) {
        return oldFilename + ".txt";
    }

    /**
     * @return String bundle name
     */
    public String getBundleName() {
        return "TEXT";
    }

    /**
     * @return String bitstream format
     */
    public String getFormatString() {
        return "Text";
    }

    /**
     * @return String description
     */
    public String getDescription() {
        return "Extracted text";
    }

    /**
     * @param item item
     * @param source source input stream
     * @param verbose verbose mode
     * @return InputStream the resulting input stream
     * @throws Exception if error
     */
    @Override
    public InputStream getDestinationStream(Item item, InputStream source, boolean verbose)
        throws Exception {
        String extractedText = null;

        try {
            POITextExtractor theExtractor = ExtractorFactory.createExtractor(source);
            if (theExtractor instanceof ExcelExtractor) {
                // for xls file
                extractedText = (theExtractor).getText();
            } else if (theExtractor instanceof XSSFExcelExtractor) {
                // for xlsx file
                extractedText = (theExtractor).getText();
            }
        } catch (Exception e) {
            log.error("Error filtering bitstream: " + e.getMessage(), e);
            throw e;
        }

        if (extractedText != null) {
            // generate an input stream with the extracted text
            return IOUtils.toInputStream(extractedText, StandardCharsets.UTF_8);
        }

        return null;
    }
}
Some files were not shown because too many files have changed in this diff