Merge branch 'main' into w2p-80239_PDFBoxThumbnail-fix
35  .codecov.yml  Normal file
@@ -0,0 +1,35 @@
# DSpace configuration for Codecov.io coverage reports
# These override the default YAML settings at
# https://docs.codecov.io/docs/codecov-yaml#section-default-yaml
# Can be validated via instructions at:
# https://docs.codecov.io/docs/codecov-yaml#validate-your-repository-yaml

# Tell Codecov not to send a coverage notification until (at least) 2 builds are completed
# Since we run Unit & Integration tests in parallel, this lets Codecov know that coverage
# needs to be merged across those builds
codecov:
  notify:
    after_n_builds: 2

# Settings related to code coverage analysis
coverage:
  status:
    # Configuration for project-level checks. This checks how the PR changes overall coverage.
    project:
      default:
        # For each PR, auto compare coverage to previous commit.
        # Require that overall (project) coverage does NOT drop more than 0.5%
        target: auto
        threshold: 0.5%
    # Configuration for patch-level checks. This checks the relative coverage of the new PR code ONLY.
    patch:
      default:
        # Enable informational mode, which just provides info to reviewers & always passes
        # https://docs.codecov.io/docs/commit-status#section-informational
        informational: true

# Turn PR comments "off". This feature adds the code coverage summary as a
# comment on each PR. See https://docs.codecov.io/docs/pull-request-comments
# However, this same info is available from the Codecov checks in the PR's
# "Checks" tab in GitHub. So, the comment is unnecessary.
comment: false
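Per the validation instructions linked in the comments above, the file can be checked before committing. A minimal sketch, assuming Codecov's documented `/validate` endpoint:

```
# Validate .codecov.yml against Codecov's parser (endpoint per the Codecov docs linked above)
curl --data-binary @.codecov.yml https://codecov.io/validate
```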
11  .dockerignore  Normal file
@@ -0,0 +1,11 @@
.git/
.idea/
.settings/
*/target/
dspace/modules/*/target/
Dockerfile.*
dspace/src/main/docker/dspace-postgres-pgcrypto
dspace/src/main/docker/dspace-postgres-pgcrypto-curl
dspace/src/main/docker/solr
dspace/src/main/docker/README.md
dspace/src/main/docker-compose/
6  .gitattributes  vendored
@@ -1,6 +1,12 @@
# Auto detect text files and perform LF normalization
* text=auto

# Ensure Unix files always keep Unix line endings
*.sh text eol=lf

# Ensure Windows files always keep Windows line endings
*.bat text eol=crlf

# Standard to msysgit
*.doc diff=astextplain
*.DOC diff=astextplain
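To confirm which of these attributes Git resolves for a given file, `git check-attr` can be used. A quick sketch (the script path is only an illustration, not a file in this repo):

```
# Show the text/eol attributes Git applies to a hypothetical shell script
git check-attr text eol -- dspace/bin/example.sh
```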
22  .github/ISSUE_TEMPLATE/bug_report.md  vendored  Normal file
@@ -0,0 +1,22 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug, needs triage
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is. Include the version(s) of DSpace where you've seen this problem. Link to examples if they are public.

**To Reproduce**
Steps to reproduce the behavior:
1. Do this
2. Then this...

**Expected behavior**
A clear and concise description of what you expected to happen.

**Related work**
Link to any related tickets or PRs here.
20  .github/ISSUE_TEMPLATE/feature_request.md  vendored  Normal file
@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest a new feature for this project
title: ''
labels: new feature, needs triage
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives or workarounds you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
26  .github/disabled-workflows/pull_request_opened.yml  vendored  Normal file
@@ -0,0 +1,26 @@
# This workflow runs whenever a new pull request is created
# TEMPORARILY DISABLED. Unfortunately this doesn't work for PRs created from forked repositories (which is how we tend to create PRs).
# There is no known workaround yet. See https://github.community/t/how-to-use-github-token-for-prs-from-forks/16818
name: Pull Request opened

# Only run for newly opened PRs against the "main" branch
on:
  pull_request:
    types: [opened]
    branches:
      - main

jobs:
  automation:
    runs-on: ubuntu-latest
    steps:
      # Assign the PR to whomever created it. This is useful for visualizing assignments on project boards
      # See https://github.com/marketplace/actions/pull-request-assigner
      - name: Assign PR to creator
        uses: thomaseizinger/assign-pr-creator-action@v1.0.0
        # Note, this authentication token is created automatically
        # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
        # Ignore errors. It is possible the PR was created by someone who cannot be assigned
        continue-on-error: true
26  .github/pull_request_template.md  vendored  Normal file
@@ -0,0 +1,26 @@
## References
_Add references/links to any related issues or PRs. These may include:_
* Fixes #[issue-number]
* Related to [REST Contract](https://github.com/DSpace/Rest7Contract)

## Description
Short summary of changes (1-2 sentences).

## Instructions for Reviewers
Please add a more detailed description of the changes made by your PR. At a minimum, providing a bulleted list of changes in your PR is helpful to reviewers.

List of changes in this PR:
* First, ...
* Second, ...

**Include guidance for how to test or review your PR.** This may include: steps to reproduce a bug, screenshots or description of a new feature, or reasons behind specific changes.

## Checklist
_This checklist provides a reminder of what we are going to look for when reviewing your PR. You need not complete this checklist prior to creating your PR (draft PRs are always welcome). If you are unsure about an item in the checklist, don't hesitate to ask. We're here to help!_

- [ ] My PR is small in size (e.g. less than 1,000 lines of code, not including comments & integration tests). Exceptions may be made if previously agreed upon.
- [ ] My PR passes Checkstyle validation based on the [Code Style Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Style+Guide).
- [ ] My PR includes Javadoc for _all new (or modified) public methods and classes_. It also includes Javadoc for large or complex private methods.
- [ ] My PR passes all tests and includes new/updated Unit or Integration Tests based on the [Code Testing Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Testing+Guide).
- [ ] If my PR includes new, third-party dependencies (in any `pom.xml`), I've made sure their licenses align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation.
- [ ] If my PR modifies the REST API, I've linked to the REST Contract page (or open PR) related to this change.
65  .github/workflows/build.yml  vendored  Normal file
@@ -0,0 +1,65 @@
# DSpace Continuous Integration/Build via GitHub Actions
# Concepts borrowed from
# https://docs.github.com/en/free-pro-team@latest/actions/guides/building-and-testing-java-with-maven
name: Build

# Run this Build for all pushes / PRs to current branch
on: [push, pull_request]

jobs:
  tests:
    runs-on: ubuntu-latest
    env:
      # Give Maven 1GB of memory to work with
      # Suppress all Maven "downloading" messages in Travis logs (see https://stackoverflow.com/a/35653426)
      # This also slightly speeds builds, as there is less logging
      MAVEN_OPTS: "-Xmx1024M -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn"
    strategy:
      # Create a matrix of two separate configurations for Unit vs Integration Tests
      # This will ensure those tasks are run in parallel
      matrix:
        include:
          # NOTE: Unit Tests include deprecated REST API v6 (as it has unit tests)
          - type: "Unit Tests"
            mvnflags: "-DskipUnitTests=false -Pdspace-rest"
          # NOTE: ITs skip all code validation checks, as they are already done by Unit Test job.
          # - enforcer.skip   => Skip maven-enforcer-plugin rules
          # - checkstyle.skip => Skip all checkstyle checks by maven-checkstyle-plugin
          # - license.skip    => Skip all license header checks by license-maven-plugin
          # - xml.skip        => Skip all XML/XSLT validation by xml-maven-plugin
          - type: "Integration Tests"
            mvnflags: "-DskipIntegrationTests=false -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true"
      # Do NOT exit immediately if one matrix job fails
      # This ensures ITs continue running even if Unit Tests fail, or vice versa
      fail-fast: false
    # These are the actual CI steps to perform per job
    steps:
      # https://github.com/actions/checkout
      - name: Checkout codebase
        uses: actions/checkout@v1

      # https://github.com/actions/setup-java
      - name: Install JDK 11
        uses: actions/setup-java@v1
        with:
          java-version: 11

      # https://github.com/actions/cache
      - name: Cache Maven dependencies
        uses: actions/cache@v2
        with:
          # Cache entire ~/.m2/repository
          path: ~/.m2/repository
          # Cache key is hash of all pom.xml files. Therefore any changes to POMs will invalidate cache
          key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
          restore-keys: ${{ runner.os }}-maven-

      # Run parallel Maven builds based on the above 'strategy.matrix'
      - name: Run Maven ${{ matrix.type }}
        env:
          TEST_FLAGS: ${{ matrix.mvnflags }}
        run: mvn install -B -V -P-assembly -Pcoverage-report $TEST_FLAGS

      # https://github.com/codecov/codecov-action
      - name: Upload coverage to Codecov.io
        uses: codecov/codecov-action@v1
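The two matrix jobs can be reproduced locally with the same Maven flags the workflow passes via `TEST_FLAGS`. A sketch, run from the repository root:

```
# Mirror the "Unit Tests" matrix job
mvn install -B -V -P-assembly -Pcoverage-report -DskipUnitTests=false -Pdspace-rest

# Mirror the "Integration Tests" matrix job (validation checks skipped, as in CI)
mvn install -B -V -P-assembly -Pcoverage-report -DskipIntegrationTests=false \
    -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true
```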
29  .github/workflows/issue_opened.yml  vendored  Normal file
@@ -0,0 +1,29 @@
# This workflow runs whenever a new issue is created
name: Issue opened

on:
  issues:
    types: [opened]

jobs:
  automation:
    runs-on: ubuntu-latest
    steps:
      # Add the new issue to a project board, if it needs triage
      # See https://github.com/marketplace/actions/create-project-card-action
      - name: Add issue to project board
        # Only add to project board if issue is flagged as "needs triage" or has no labels
        # NOTE: By default we flag new issues as "needs triage" in our issue template
        if: (contains(github.event.issue.labels.*.name, 'needs triage') || join(github.event.issue.labels.*.name) == '')
        uses: technote-space/create-project-card-action@v1
        # Note, the authentication token below is an ORG level Secret.
        # It must be created/recreated manually via a personal access token with "public_repo" and "admin:org" permissions
        # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token#permissions-for-the-github_token
        # This is necessary because the "DSpace Backlog" project is an org level project (i.e. not repo specific)
        with:
          GITHUB_TOKEN: ${{ secrets.ORG_PROJECT_TOKEN }}
          PROJECT: DSpace Backlog
          COLUMN: Triage
          CHECK_ORG_PROJECT: true
        # Ignore errors.
        continue-on-error: true
25  .github/workflows/label_merge_conflicts.yml  vendored  Normal file
@@ -0,0 +1,25 @@
# This workflow checks open PRs for merge conflicts and labels them when conflicts are found
name: Check for merge conflicts

# Run whenever the "main" branch is updated
# NOTE: This means merge conflicts are only checked for when a PR is merged to main.
on:
  push:
    branches:
      - main

jobs:
  triage:
    runs-on: ubuntu-latest
    steps:
      # See: https://github.com/mschilde/auto-label-merge-conflicts/
      - name: Auto-label PRs with merge conflicts
        uses: mschilde/auto-label-merge-conflicts@v2.0
        # Add "merge conflict" label if a merge conflict is detected. Remove it when resolved.
        # Note, the authentication token is created automatically
        # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token
        with:
          CONFLICT_LABEL_NAME: 'merge conflict'
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        # Ignore errors
        continue-on-error: true
2  .gitignore  vendored
@@ -19,7 +19,7 @@ tags
overlays/

## Ignore project files created by NetBeans
nbproject/private/
nbproject/
build/
nbbuild/
dist/
9  .lgtm.yml  Normal file
@@ -0,0 +1,9 @@
# LGTM Settings (https://lgtm.com/)
# For reference, see https://lgtm.com/help/lgtm/lgtm.yml-configuration-file
# or template at https://lgtm.com/static/downloads/lgtm.template.yml

extraction:
  java:
    index:
      # Specify the Java version required to build the project
      java_version: 11
44  .travis.yml
@@ -1,44 +0,0 @@
language: java
sudo: false

env:
  # Give Maven 1GB of memory to work with
  - MAVEN_OPTS=-Xmx1024M

jdk:
  # DS-3384 Oracle JDK 8 has DocLint enabled by default.
  # Let's use this to catch any newly introduced DocLint issues.
  - oraclejdk8

## Should we run into any problems with oraclejdk8 on Travis, we may try the following workaround.
## https://docs.travis-ci.com/user/languages/java#Testing-Against-Multiple-JDKs
## https://github.com/travis-ci/travis-ci/issues/3259#issuecomment-130860338
#addons:
#  apt:
#    packages:
#    - oracle-java8-installer

# Install prerequisites for building Mirage2 more rapidly
before_install:
  # Remove outdated settings.xml from Travis builds. Workaround for https://github.com/travis-ci/travis-ci/issues/4629
  - rm ~/.m2/settings.xml

# Skip install stage, as we'll do it below
install: "echo 'Skipping install stage, dependencies will be downloaded during build and test stages.'"

# Two stage Build and Test
# 1. Install & Unit Test APIs
# 2. Assemble DSpace
script:
  # 1. [Install & Unit Test] Check source code licenses and run source code Unit Tests
  #    license:check => Validate all source code license headers
  #    -Dmaven.test.skip=false => Enable DSpace Unit Tests
  #    -DskipITs=false => Enable DSpace Integration Tests
  #    -P !assembly => Skip normal assembly (as it can be memory intensive)
  #    -B => Maven batch/non-interactive mode (recommended for CI)
  #    -V => Display Maven version info before build
  #    -Dsurefire.rerunFailingTestsCount=2 => try again for flakey tests, and keep track of/report on number of retries
  - "mvn clean install license:check -Dmaven.test.skip=false -DskipITs=false -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
  # 2. [Assemble DSpace] Ensure overlay & assembly process works (from [src]/dspace/)
  #    -P !assembly => SKIP the actual building of [src]/dspace/dspace-installer (as it can be memory intensive)
  - "cd dspace && mvn package -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
63  Dockerfile  Normal file
@@ -0,0 +1,63 @@
# This image will be published as dspace/dspace
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
#
# This version is JDK11 compatible
# - tomcat:8-jdk11
# - ANT 1.10.7
# - maven:3-jdk-11 (see dspace-dependencies)
# - note: default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x

# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app

# The dspace-install directory will be written to /install
RUN mkdir /install \
    && chown -Rv dspace: /install \
    && chown -Rv dspace: /app

USER dspace

# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg

# Build DSpace (note: this build doesn't include the optional, deprecated "dspace-rest" webapp)
# Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
RUN mvn package && \
    mv /app/dspace/target/${TARGET_DIR}/* /install && \
    mvn clean

# Step 2 - Run Ant Deploy
FROM tomcat:8-jdk11 as ant_build
ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src

# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.7
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH

RUN mkdir $ANT_HOME && \
    wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME

RUN ant init_installation update_configs update_code update_webapps

# Step 3 - Run tomcat
# Create a new tomcat image that does not retain the build directory contents
FROM tomcat:8-jdk11
ENV DSPACE_INSTALL=/dspace
COPY --from=ant_build /dspace $DSPACE_INSTALL
EXPOSE 8080 8009

ENV JAVA_OPTS=-Xmx2000m

# Run the "server" webapp off the /server path (e.g. http://localhost:8080/server/)
RUN ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/server
# If you wish to run "server" webapp off the ROOT path, then comment out the above RUN, and uncomment the below RUN.
# You also MUST update the URL in dspace/src/main/docker/local.cfg
# Please note that server webapp should only run on one path at a time.
#RUN mv /usr/local/tomcat/webapps/ROOT /usr/local/tomcat/webapps/ROOT.bk && \
#    ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/ROOT
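For a local build, the tag noted in the header comment can be used. A sketch (on its own this won't yield a working site, since the container still needs the database and Solr services from docker-compose.yml further down):

```
# Build the backend image from the repository root, using the branch's default tag
docker build -t dspace/dspace:dspace-7_x .

# Run it, exposing Tomcat; the "server" webapp is served at http://localhost:8080/server/
docker run -d -p 8080:8080 dspace/dspace:dspace-7_x
```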
53  Dockerfile.cli  Normal file
@@ -0,0 +1,53 @@
# This image will be published as dspace/dspace-cli
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
#
# This version is JDK11 compatible
# - openjdk:11
# - ANT 1.10.7
# - maven:3-jdk-11 (see dspace-dependencies)
# - note: default tag for branch: dspace/dspace-cli: dspace/dspace-cli:dspace-7_x

# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app

# The dspace-install directory will be written to /install
RUN mkdir /install \
    && chown -Rv dspace: /install \
    && chown -Rv dspace: /app

USER dspace

# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg

# Build DSpace. Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
RUN mvn package && \
    mv /app/dspace/target/${TARGET_DIR}/* /install && \
    mvn clean

# Step 2 - Run Ant Deploy
FROM openjdk:11 as ant_build
ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src

# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.7
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH

RUN mkdir $ANT_HOME && \
    wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME

RUN ant init_installation update_configs update_code

# Step 3 - Run jdk
# Create a new JDK image that does not retain the build directory contents
FROM openjdk:11
ENV DSPACE_INSTALL=/dspace
COPY --from=ant_build /dspace $DSPACE_INSTALL

ENV JAVA_OPTS=-Xmx1000m
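A sketch of building and invoking the CLI image. Since this Dockerfile sets no ENTRYPOINT, the launcher script path is given explicitly here (docker-compose-cli.yml below configures it as the entrypoint instead):

```
# Build the CLI image with the branch's default tag
docker build -f Dockerfile.cli -t dspace/dspace-cli:dspace-7_x .

# Invoke the DSpace command-line launcher; 'help' lists available commands
docker run --rm dspace/dspace-cli:dspace-7_x /dspace/bin/dspace help
```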
27  Dockerfile.dependencies  Normal file
@@ -0,0 +1,27 @@
# This image will be published as dspace/dspace-dependencies
# The purpose of this image is to make the build for dspace/dspace run faster
#
# This version is JDK11 compatible
# - maven:3-jdk-11

# Step 1 - Run Maven Build
FROM maven:3-jdk-11 as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app

RUN useradd dspace \
    && mkdir /home/dspace \
    && chown -Rv dspace: /home/dspace
USER dspace

# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg

# Trigger the installation of all maven dependencies
RUN mvn package

# Clear the contents of the /app directory (including all maven builds), so no artifacts remain.
# This ensures when dspace:dspace is built, it will just use the Maven local cache (.m2) for dependencies
USER root
RUN rm -rf /app/*
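The other Dockerfiles above build FROM this image, so it is typically built first. A sketch:

```
# Pre-build the dependency-cache image referenced by Dockerfile, Dockerfile.cli and Dockerfile.test
docker build -f Dockerfile.dependencies -t dspace/dspace-dependencies:dspace-7_x .
```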
77  Dockerfile.test  Normal file
@@ -0,0 +1,77 @@
# This image will be published as dspace/dspace
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
#
# This version is JDK11 compatible
# - tomcat:8-jdk11
# - ANT 1.10.7
# - maven:3-jdk-11 (see dspace-dependencies)
# - note: default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x-test
#
# This image is meant for TESTING/DEVELOPMENT ONLY as it deploys the old v6 REST API under HTTP (not HTTPS)

# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app

# The dspace-install directory will be written to /install
RUN mkdir /install \
    && chown -Rv dspace: /install \
    && chown -Rv dspace: /app

USER dspace

# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg

# Build DSpace (including the optional, deprecated "dspace-rest" webapp)
# Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
RUN mvn package -Pdspace-rest && \
    mv /app/dspace/target/${TARGET_DIR}/* /install && \
    mvn clean

# Step 2 - Run Ant Deploy
FROM tomcat:8-jdk11 as ant_build
ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src

# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.7
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH

RUN mkdir $ANT_HOME && \
    wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME

RUN ant init_installation update_configs update_code update_webapps

# Step 3 - Run tomcat
# Create a new tomcat image that does not retain the build directory contents
FROM tomcat:8-jdk11
ENV DSPACE_INSTALL=/dspace
ENV TOMCAT_INSTALL=/usr/local/tomcat
COPY --from=ant_build /dspace $DSPACE_INSTALL
# Enable the AJP connector in Tomcat's server.xml
# NOTE: secretRequired="false" should only be used when AJP is NOT accessible from an external network. But, secretRequired="true" isn't supported by mod_proxy_ajp until Apache 2.5
RUN sed -i '/Service name="Catalina".*/a \\n    <Connector protocol="AJP/1.3" port="8009" address="0.0.0.0" redirectPort="8443" URIEncoding="UTF-8" secretRequired="false" />' $TOMCAT_INSTALL/conf/server.xml
# Expose Tomcat port and AJP port
EXPOSE 8080 8009

ENV JAVA_OPTS=-Xmx2000m

# Run the "server" webapp off the /server path (e.g. http://localhost:8080/server/)
# and the v6.x (deprecated) REST API off the "/rest" path
RUN ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/server && \
    ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest
# If you wish to run "server" webapp off the ROOT path, then comment out the above RUN, and uncomment the below RUN.
# You also MUST update the URL in dspace/src/main/docker/local.cfg
# Please note that server webapp should only run on one path at a time.
#RUN mv /usr/local/tomcat/webapps/ROOT /usr/local/tomcat/webapps/ROOT.bk && \
#    ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/ROOT && \
#    ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest

# Overwrite the v6.x (deprecated) REST API's web.xml, so that we can run it on HTTP (defaults to requiring HTTPS)
COPY dspace/src/main/docker/test/rest_web.xml $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml
RUN sed -i -e "s|\${dspace.dir}|$DSPACE_INSTALL|" $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml
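This is the image that docker-compose.yml (below) builds by default. A sketch of building it directly, with the `-test` tag suffix noted in the header:

```
# Build the test/development image used by docker-compose.yml
docker build -f Dockerfile.test -t dspace/dspace:dspace-7_x-test .
```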
11  LICENSE
@@ -1,7 +1,6 @@
DSpace source code license:
DSpace source code BSD License:


Copyright (c) 2002-2016, DuraSpace. All rights reserved.
Copyright (c) 2002-2020, LYRASIS. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
@@ -34,7 +33,7 @@ DAMAGE.


DSpace uses third-party libraries which may be distributed under
different licenses to the above. Information about these licenses
is detailed in the LICENSES_THIRD_PARTY file at the root of the source
tree. You must agree to the terms of these licenses, in addition to
different licenses to the above. Information about these licenses
is detailed in the LICENSES_THIRD_PARTY file at the root of the source
tree. You must agree to the terms of these licenses, in addition to
the above DSpace source code license, in order to use this software.

@@ -366,7 +366,6 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* FindBugs-Annotations (com.google.code.findbugs:annotations:3.0.1u2 - http://findbugs.sourceforge.net/)
* MaxMind GeoIP Legacy API (com.maxmind.geoip:geoip-api:1.3.0 - https://github.com/maxmind/geoip-api-java)
* JHighlight (com.uwyn:jhighlight:1.0 - https://jhighlight.dev.java.net/)
* DSpace TM-Extractors Dependency (org.dspace.dependencies:dspace-tm-extractors:1.0.1 - http://projects.dspace.org/dspace-pom/dspace-tm-extractors)
* A Hibernate O/RM Module (org.hibernate:hibernate-core:4.2.21.Final - http://hibernate.org)
* A Hibernate O/RM Module (org.hibernate:hibernate-ehcache:4.2.21.Final - http://hibernate.org)
* Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:4.0.2.Final - http://hibernate.org)
21  NOTICE
@@ -1,15 +1,18 @@
Licensing Notices
=================

Licensing Notice
[July 2019] DuraSpace joined with LYRASIS (another 501(c)3 organization) in July 2019.
LYRASIS holds the copyrights of DuraSpace.

Fedora Commons joined with the DSpace Foundation and began operating under
[July 2009] Fedora Commons joined with the DSpace Foundation and began operating under
the new name DuraSpace in July 2009. DuraSpace holds the copyrights of
the DSpace Foundation, Inc.

The DSpace Foundation, Inc. is a 501(c)3 corporation established in July 2007
with a mission to promote and advance the dspace platform enabling management,
access and preservation of digital works. The Foundation was able to transfer
the legal copyright from Hewlett-Packard Company (HP) and Massachusetts
Institute of Technology (MIT) to the DSpace Foundation in October 2007. Many
of the files in the source code may contain a copyright statement stating HP
and MIT possess the copyright, in these instances please note that the copy
[July 2007] The DSpace Foundation, Inc. is a 501(c)3 corporation established in July 2007
with a mission to promote and advance the dspace platform enabling management,
access and preservation of digital works. The Foundation was able to transfer
the legal copyright from Hewlett-Packard Company (HP) and Massachusetts
Institute of Technology (MIT) to the DSpace Foundation in October 2007. Many
of the files in the source code may contain a copyright statement stating HP
and MIT possess the copyright, in these instances please note that the copy
right has transferred to the DSpace foundation, and subsequently to DuraSpace.
103  README.md
@@ -1,24 +1,24 @@

# DSpace

[](https://travis-ci.org/DSpace/DSpace)
[](https://github.com/DSpace/DSpace/actions?query=workflow%3ABuild)

[DSpace Documentation](https://wiki.duraspace.org/display/DSDOC/) |
[DSpace Documentation](https://wiki.lyrasis.org/display/DSDOC/) |
[DSpace Releases](https://github.com/DSpace/DSpace/releases) |
[DSpace Wiki](https://wiki.duraspace.org/display/DSPACE/Home) |
[Support](https://wiki.duraspace.org/display/DSPACE/Support)
[DSpace Wiki](https://wiki.lyrasis.org/display/DSPACE/Home) |
[Support](https://wiki.lyrasis.org/display/DSPACE/Support)

DSpace open source software is a turnkey repository application used by more than
DSpace open source software is a turnkey repository application used by more than
2,000 organizations and institutions worldwide to provide durable access to digital resources.
For more information, visit http://www.dspace.org/

***
:warning: **Work on DSpace 7 has begun on our `master` branch.** This means that there is temporarily NO user interface on this `master` branch. DSpace 7 will feature a new, unified [Angular](https://angular.io/) user interface, along with an enhanced, rebuilt REST API. The latest status of this work can be found on the [DSpace 7 UI Working Group](https://wiki.duraspace.org/display/DSPACE/DSpace+7+UI+Working+Group) page. Additionally, the codebases can be found in the following places:
* DSpace 7 REST API work is occurring on the [`master` branch](https://github.com/DSpace/DSpace/tree/master/dspace-spring-rest) of this repository.
* The REST Contract is being documented at https://github.com/DSpace/Rest7Contract
:warning: **Work on DSpace 7 has begun on our `main` branch.** This means that there is NO user interface on this `main` branch. DSpace 7 will feature a new, unified [Angular](https://angular.io/) user interface, along with an enhanced, rebuilt REST API. The latest status of this work can be found on the [DSpace 7 Working Group](https://wiki.lyrasis.org/display/DSPACE/DSpace+7+Working+Group) page. Additionally, the codebases can be found in the following places:
* DSpace 7 REST API work is occurring on the [`main` branch](https://github.com/DSpace/DSpace/tree/main/dspace-server-webapp) of this repository.
* The REST Contract is at https://github.com/DSpace/Rest7Contract
* DSpace 7 Angular UI work is occurring at https://github.com/DSpace/dspace-angular

**If you would like to get involved in our DSpace 7 development effort, we welcome new contributors.** Just join one of our meetings or get in touch via Slack. See the [DSpace 7 UI Working Group](https://wiki.duraspace.org/display/DSPACE/DSpace+7+UI+Working+Group) wiki page for more info.

**If you would like to get involved in our DSpace 7 development effort, we welcome new contributors.** Just join one of our meetings or get in touch via Slack. See the [DSpace 7 Working Group](https://wiki.lyrasis.org/display/DSPACE/DSpace+7+Working+Group) wiki page for more info.

**If you are looking for the ongoing maintenance work for DSpace 6 (or prior releases)**, you can find that work on the corresponding maintenance branch (e.g. [`dspace-6_x`](https://github.com/DSpace/DSpace/tree/dspace-6_x)) in this repository.
***
@@ -31,29 +31,32 @@ Past releases are all available via GitHub at https://github.com/DSpace/DSpace/r

## Documentation / Installation

Documentation for each release may be viewed online or downloaded via our [Documentation Wiki](https://wiki.duraspace.org/display/DSDOC/).
Documentation for each release may be viewed online or downloaded via our [Documentation Wiki](https://wiki.lyrasis.org/display/DSDOC/).

The latest DSpace Installation instructions are available at:
https://wiki.duraspace.org/display/DSDOC6x/Installing+DSpace
https://wiki.lyrasis.org/display/DSDOC7x/Installing+DSpace

Please be aware that, as a Java web application, DSpace requires a database (PostgreSQL or Oracle)
Please be aware that, as a Java web application, DSpace requires a database (PostgreSQL or Oracle)
and a servlet container (usually Tomcat) in order to function.
More information about these and all other prerequisites can be found in the Installation instructions above.

## Running DSpace 7 in Docker
See [Running DSpace 7 with Docker Compose](dspace/src/main/docker-compose/README.md)

## Contributing

DSpace is a community built and supported project. We do not have a centralized development or support team,
DSpace is a community built and supported project. We do not have a centralized development or support team,
but have a dedicated group of volunteers who help us improve the software, documentation, resources, etc.

We welcome contributions of any type. Here's a few basic guides that provide suggestions for contributing to DSpace:
* [How to Contribute to DSpace](https://wiki.duraspace.org/display/DSPACE/How+to+Contribute+to+DSpace): How to contribute in general (via code, documentation, bug reports, expertise, etc)
* [Code Contribution Guidelines](https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines): How to give back code or contribute features, bug fixes, etc.
* [DSpace Community Advisory Team (DCAT)](https://wiki.duraspace.org/display/cmtygp/DSpace+Community+Advisory+Team): If you are not a developer, we also have an interest group specifically for repository managers. The DCAT group meets virtually, once a month, and sends open invitations to join their meetings via the [DCAT mailing list](https://groups.google.com/d/forum/DSpaceCommunityAdvisoryTeam).
* [How to Contribute to DSpace](https://wiki.lyrasis.org/display/DSPACE/How+to+Contribute+to+DSpace): How to contribute in general (via code, documentation, bug reports, expertise, etc)
* [Code Contribution Guidelines](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines): How to give back code or contribute features, bug fixes, etc.
* [DSpace Community Advisory Team (DCAT)](https://wiki.lyrasis.org/display/cmtygp/DSpace+Community+Advisory+Team): If you are not a developer, we also have an interest group specifically for repository managers. The DCAT group meets virtually, once a month, and sends open invitations to join their meetings via the [DCAT mailing list](https://groups.google.com/d/forum/DSpaceCommunityAdvisoryTeam).

We also encourage GitHub Pull Requests (PRs) at any time. Please see our [Development with Git](https://wiki.duraspace.org/display/DSPACE/Development+with+Git) guide for more info.
We also encourage GitHub Pull Requests (PRs) at any time. Please see our [Development with Git](https://wiki.lyrasis.org/display/DSPACE/Development+with+Git) guide for more info.

In addition, a listing of all known contributors to DSpace software can be
found online at: https://wiki.duraspace.org/display/DSPACE/DSpaceContributors
found online at: https://wiki.lyrasis.org/display/DSPACE/DSpaceContributors

## Getting Help

@@ -61,22 +64,72 @@ DSpace provides public mailing lists where you can post questions or raise topic
We welcome everyone to participate in these lists:

* [dspace-community@googlegroups.com](https://groups.google.com/d/forum/dspace-community) : General discussion about DSpace platform, announcements, sharing of best practices
* [dspace-tech@googlegroups.com](https://groups.google.com/d/forum/dspace-tech) : Technical support mailing list. See also our guide for [How to troubleshoot an error](https://wiki.duraspace.org/display/DSPACE/Troubleshoot+an+error).
* [dspace-tech@googlegroups.com](https://groups.google.com/d/forum/dspace-tech) : Technical support mailing list. See also our guide for [How to troubleshoot an error](https://wiki.lyrasis.org/display/DSPACE/Troubleshoot+an+error).
* [dspace-devel@googlegroups.com](https://groups.google.com/d/forum/dspace-devel) : Developers / Development mailing list

Great Q&A is also available under the [DSpace tag on Stackoverflow](http://stackoverflow.com/questions/tagged/dspace)

Additional support options are listed at https://wiki.duraspace.org/display/DSPACE/Support
Additional support options are at https://wiki.lyrasis.org/display/DSPACE/Support

DSpace also has an active service provider network. If you'd rather hire a service provider to
install, upgrade, customize or host DSpace, then we recommend getting in touch with one of our
DSpace also has an active service provider network. If you'd rather hire a service provider to
install, upgrade, customize or host DSpace, then we recommend getting in touch with one of our
[Registered Service Providers](http://www.dspace.org/service-providers).

## Issue Tracker

The DSpace Issue Tracker can be found at: https://jira.duraspace.org/projects/DS/summary
DSpace uses GitHub to track issues:
* Backend (REST API) issues: https://github.com/DSpace/DSpace/issues
* Frontend (User Interface) issues: https://github.com/DSpace/dspace-angular/issues

## Testing

### Running Tests

By default, in DSpace, Unit Tests and Integration Tests are disabled. However, they are
run automatically by [GitHub Actions](https://github.com/DSpace/DSpace/actions?query=workflow%3ABuild) for all Pull Requests and code commits.

* How to run both Unit Tests (via `maven-surefire-plugin`) and Integration Tests (via `maven-failsafe-plugin`):
  ```
  mvn install -DskipUnitTests=false -DskipIntegrationTests=false
  ```
* How to run _only_ Unit Tests:
  ```
  mvn test -DskipUnitTests=false
  ```
* How to run a *single* Unit Test
  ```
  # Run all tests in a specific test class
  # NOTE: failIfNoTests=false is required to skip tests in other modules
  mvn test -DskipUnitTests=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false

  # Run one test method in a specific test class
  mvn test -DskipUnitTests=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
  ```
* How to run _only_ Integration Tests
  ```
  mvn install -DskipIntegrationTests=false
  ```
* How to run a *single* Integration Test
  ```
  # Run all integration tests in a specific test class
  # NOTE: failIfNoTests=false is required to skip tests in other modules
  mvn install -DskipIntegrationTests=false -Dit.test=[full.package.testClassName] -DfailIfNoTests=false

  # Run one test method in a specific test class
  mvn install -DskipIntegrationTests=false -Dit.test=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
  ```
* How to run only tests of a specific DSpace module (see the worked example after this list)
  ```
  # Before you can run only one module's tests, other modules may need installing into your ~/.m2
  cd [dspace-src]
  mvn clean install

  # Then, move into a module subdirectory, and run the test command
  cd [dspace-src]/dspace-server-webapp
  # Choose your test command from the lists above
  ```
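For instance, a complete module-only run for the server webapp might look like this (a sketch combining the steps above):

```
# Install all modules once, so cross-module dependencies resolve from ~/.m2
cd [dspace-src]
mvn clean install

# Then run just this module's Unit Tests
cd dspace-server-webapp
mvn test -DskipUnitTests=false
```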

## License

DSpace source code is freely available under a standard [BSD 3-Clause license](https://opensource.org/licenses/BSD-3-Clause).
The full license is available at http://www.dspace.org/license/
The full license is available in the [LICENSE](LICENSE) file or online at http://www.dspace.org/license/
15  SECURITY.md  Normal file
@@ -0,0 +1,15 @@
# Security Policy

## Supported Versions

For information regarding which versions of DSpace are currently under support, please see our DSpace Software Support Policy:

https://wiki.lyrasis.org/display/DSPACE/DSpace+Software+Support+Policy

## Reporting a Vulnerability

If you believe you have found a security vulnerability in a supported version of DSpace, we encourage you to let us know right away.
We will investigate all legitimate reports and do our best to quickly fix the problem. Please see our DSpace Software Support Policy
for information on privately reporting vulnerabilities:

https://wiki.lyrasis.org/display/DSPACE/DSpace+Software+Support+Policy
@@ -44,15 +44,16 @@ For more information on CheckStyle configurations below, see: http://checkstyle.
         with @SuppressWarnings. See also SuppressWarningsHolder below -->
    <module name="SuppressWarningsFilter" />

    <!-- Maximum line length is 120 characters -->
    <module name="LineLength">
        <property name="fileExtensions" value="java"/>
        <property name="max" value="120"/>
        <!-- Only exceptions for packages, imports, URLs, and JavaDoc {@link} tags -->
        <property name="ignorePattern" value="^package.*|^import.*|http://|https://|@link"/>
    </module>

    <!-- Check individual Java source files for specific rules -->
    <module name="TreeWalker">
        <!-- Maximum line length is 120 characters -->
        <module name="LineLength">
            <property name="max" value="120"/>
            <!-- Only exceptions for packages, imports, URLs, and JavaDoc {@link} tags -->
            <property name="ignorePattern" value="^package.*|^import.*|http://|https://|@link"/>
        </module>

        <!-- Highlight any TODO or FIXME comments in info messages -->
        <module name="TodoComment">
            <property name="severity" value="info"/>
@@ -94,11 +95,8 @@ For more information on CheckStyle configurations below, see: http://checkstyle.
            <!-- <property name="scope" value="public"/> -->
            <!-- TODO: Above rule has been disabled because of large amount of missing public method Javadocs -->
            <property name="scope" value="nothing"/>
            <!-- Allow RuntimeExceptions to be undeclared -->
            <property name="allowUndeclaredRTE" value="true"/>
            <!-- Allow params, throws and return tags to be optional -->
            <property name="allowMissingParamTags" value="true"/>
            <property name="allowMissingThrowsTags" value="true"/>
            <property name="allowMissingReturnTag" value="true"/>
        </module>
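These rules are enforced by maven-checkstyle-plugin during the build (the CI workflow above disables them only for the Integration Tests job via `-Dcheckstyle.skip=true`). A sketch of running the same validation locally, assuming the plugin configuration in the parent POM:

```
# Run Checkstyle validation against the source tree
mvn checkstyle:check
```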
25  docker-compose-cli.yml  Normal file
@@ -0,0 +1,25 @@
version: "3.7"

services:
  dspace-cli:
    image: "${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-dspace-7_x}"
    container_name: dspace-cli
    build:
      context: .
      dockerfile: Dockerfile.cli
    #environment:
    volumes:
      - ./dspace/src/main/docker-compose/local.cfg:/dspace/config/local.cfg
      - assetstore:/dspace/assetstore
    entrypoint: /dspace/bin/dspace
    command: help
    networks:
      - dspacenet
    tty: true
    stdin_open: true

volumes:
  assetstore:

networks:
  dspacenet:
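With the entrypoint configured above, any DSpace CLI command can be run as a one-off container. A sketch (the `version` argument is one of the launcher's standard commands, listed by `help`):

```
# Run the default command ('help'), which lists available DSpace CLI commands
docker-compose -f docker-compose-cli.yml run --rm dspace-cli

# Or override the command, e.g. to report the installed version
docker-compose -f docker-compose-cli.yml run --rm dspace-cli version
```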
93  docker-compose.yml  Normal file
@@ -0,0 +1,93 @@
version: '3.7'
networks:
  dspacenet:
    ipam:
      config:
        # Define a custom subnet for our DSpace network, so that we can easily trust requests from host to container.
        # If you customize this value, be sure to customize the 'proxies.trusted.ipranges' in your local.cfg.
        - subnet: 172.23.0.0/16
services:
  # DSpace (backend) webapp container
  dspace:
    container_name: dspace
    image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-dspace-7_x-test}"
    build:
      context: .
      dockerfile: Dockerfile.test
    depends_on:
      - dspacedb
    networks:
      dspacenet:
    ports:
      - published: 8080
        target: 8080
      - published: 8009
        target: 8009
    stdin_open: true
    tty: true
    volumes:
      - assetstore:/dspace/assetstore
      - ./dspace/src/main/docker-compose/local.cfg:/dspace/config/local.cfg
    # Ensure that the database is ready BEFORE starting tomcat
    # 1. While a TCP connection to dspacedb port 5432 is not available, continue to sleep
    # 2. Then, run database migration to init database tables
    # 3. Finally, start Tomcat
    entrypoint:
      - /bin/bash
      - '-c'
      - |
        while (!</dev/tcp/dspacedb/5432) > /dev/null 2>&1; do sleep 1; done;
        /dspace/bin/dspace database migrate
        catalina.sh run
  # DSpace database container
  dspacedb:
    container_name: dspacedb
    environment:
      PGDATA: /pgdata
    # Uses a custom Postgres image with pgcrypto installed
    image: dspace/dspace-postgres-pgcrypto
    networks:
      dspacenet:
    ports:
      - published: 5432
        target: 5432
    stdin_open: true
    tty: true
    volumes:
      - pgdata:/pgdata
  # DSpace Solr container
  dspacesolr:
    container_name: dspacesolr
    # Uses official Solr image at https://hub.docker.com/_/solr/
    image: solr:8.8
    networks:
      dspacenet:
    ports:
      - published: 8983
        target: 8983
    stdin_open: true
    tty: true
    working_dir: /var/solr/data
    volumes:
      # Mount our local Solr core configs so that they are available as Solr configsets on container
      - ./dspace/solr/authority:/opt/solr/server/solr/configsets/authority
      - ./dspace/solr/oai:/opt/solr/server/solr/configsets/oai
      - ./dspace/solr/search:/opt/solr/server/solr/configsets/search
      - ./dspace/solr/statistics:/opt/solr/server/solr/configsets/statistics
      # Keep Solr data directory between reboots
      - solr_data:/var/solr/data
    # Initialize all DSpace Solr cores using the mounted local configsets (see above), then start Solr
    entrypoint:
      - /bin/bash
      - '-c'
      - |
        init-var-solr
        precreate-core authority /opt/solr/server/solr/configsets/authority
        precreate-core oai /opt/solr/server/solr/configsets/oai
        precreate-core search /opt/solr/server/solr/configsets/search
        precreate-core statistics /opt/solr/server/solr/configsets/statistics
        exec solr -f
volumes:
  assetstore:
  pgdata:
  solr_data:
@@ -1,5 +1,4 @@
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<groupId>org.dspace</groupId>
|
||||
<artifactId>dspace-api</artifactId>
|
||||
@@ -13,7 +12,7 @@
|
||||
<parent>
|
||||
<groupId>org.dspace</groupId>
|
||||
<artifactId>dspace-parent</artifactId>
|
||||
<version>7.0-SNAPSHOT</version>
|
||||
<version>7.0-beta6-SNAPSHOT</version>
|
||||
<relativePath>..</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -51,6 +50,33 @@
|
||||
<configuration>
|
||||
<debug>true</debug>
|
||||
<showDeprecation>true</showDeprecation>
|
||||
<annotationProcessorPaths>
|
||||
<!-- Enable Hibernate's Metamodel Generator to generate metadata model classes
|
||||
(ending in _ suffix) for more type-safe Criteria queries -->
|
||||
<path>
|
||||
<groupId>org.hibernate</groupId>
|
||||
<artifactId>hibernate-jpamodelgen</artifactId>
|
||||
<version>${hibernate.version}</version>
|
||||
</path>
|
||||
<!-- Enable JAXB -->
|
||||
<path>
|
||||
<groupId>javax.xml.bind</groupId>
|
||||
<artifactId>jaxb-api</artifactId>
|
||||
<version>${jaxb-api.version}</version>
|
||||
</path>
|
||||
<!-- Enable Commons Annotations -->
|
||||
<path>
|
||||
<groupId>javax.annotation</groupId>
|
||||
<artifactId>javax.annotation-api</artifactId>
|
||||
<version>${javax-annotation.version}</version>
|
||||
</path>
|
||||
<!-- Enable http://errorprone.info -->
|
||||
<path>
|
||||
<groupId>com.google.errorprone</groupId>
|
||||
<artifactId>error_prone_core</artifactId>
|
||||
<version>${errorprone.version}</version>
|
||||
</path>
|
||||
</annotationProcessorPaths>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
@@ -72,26 +98,11 @@
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<!-- Verify OS license headers for all source code files -->
|
||||
<plugin>
|
||||
<groupId>com.mycila</groupId>
|
||||
<artifactId>license-maven-plugin</artifactId>
|
||||
<configuration>
|
||||
<excludes>
|
||||
<exclude>**/src/test/resources/**</exclude>
|
||||
<exclude>**/src/test/data/**</exclude>
|
||||
<exclude>**/.gitignore</exclude>
|
||||
<exclude>**/src/main/resources/rebel.xml</exclude>
|
||||
<exclude>src/test/data/dspaceFolder/config/spiders/**</exclude>
|
||||
<exclude>src/main/java/org/apache/solr/handler/extraction/ExtractingParams.java</exclude>
|
||||
</excludes>
|
||||
</configuration>
|
||||
</plugin>
|
||||
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>build-helper-maven-plugin</artifactId>
|
||||
<version>1.9.1</version>
|
||||
<version>3.0.0</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<phase>validate</phase>
|
||||
@@ -116,50 +127,87 @@
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<!-- This plugin allows us to run a Groovy script in our Maven POM
|
||||
(see: https://groovy.github.io/gmaven/groovy-maven-plugin/execute.html )
|
||||
We are generating a OS-agnostic version (agnostic.build.dir) of
|
||||
the ${project.build.directory} property (full path of target dir).
|
||||
This is needed by the Surefire & Failsafe plugins (see below)
|
||||
to initialize the Unit Test environment's dspace.cfg file.
|
||||
Otherwise, the Unit Test Framework will not work on Windows OS.
|
||||
This Groovy code was mostly borrowed from:
|
||||
http://stackoverflow.com/questions/3872355/how-to-convert-file-separator-in-maven
|
||||
-->
|
||||
<plugin>
|
||||
<groupId>org.codehaus.gmaven</groupId>
|
||||
<artifactId>groovy-maven-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>setproperty</id>
|
||||
<phase>initialize</phase>
|
||||
<goals>
|
||||
<goal>execute</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<source>
|
||||
project.properties['agnostic.build.dir'] = project.build.directory.replace(File.separator, '/');
|
||||
log.info("Initializing Maven property 'agnostic.build.dir' to: {}", project.properties['agnostic.build.dir']);
|
||||
</source>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<plugin>
|
||||
<groupId>com.mycila</groupId>
|
||||
<artifactId>license-maven-plugin</artifactId>
|
||||
<configuration>
|
||||
<excludes>
|
||||
<exclude>src/test/resources/**</exclude>
|
||||
<exclude>src/test/data/**</exclude>
|
||||
<!-- Ignore license header requirements on Flyway upgrade scripts -->
|
||||
<exclude>src/main/resources/org/dspace/storage/rdbms/flywayupgrade/**</exclude>
|
||||
</excludes>
|
||||
</configuration>
|
||||
</plugin>
|
||||
|
||||
</plugins>
|
||||
</build>
|
||||
|
||||
|
||||
<profiles>
|
||||
<profile>
|
||||
<id>findbugs</id>
|
||||
<id>spotbugs</id>
|
||||
<activation>
|
||||
<activeByDefault>false</activeByDefault>
|
||||
<!-- property>
|
||||
<name>maven.test.skip</name>
|
||||
<value>false</value>
|
||||
</property -->
|
||||
</activation>
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>findbugs-maven-plugin</artifactId>
|
||||
<groupId>com.github.spotbugs</groupId>
|
||||
<artifactId>spotbugs-maven-plugin</artifactId>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</profile>
|
||||
|
||||
<!-- If Unit Testing is enabled, then setup the Unit Test Environment.
|
||||
See also the 'skiptests' profile in Parent POM. -->
|
||||
<!-- Setup the Unit Test Environment (when -DskipUnitTests=false) -->
|
||||
<profile>
|
||||
<id>test-environment</id>
|
||||
<id>unit-test-environment</id>
|
||||
<activation>
|
||||
<activeByDefault>false</activeByDefault>
|
||||
<property>
|
||||
<name>maven.test.skip</name>
|
||||
<name>skipUnitTests</name>
|
||||
<value>false</value>
|
||||
</property>
|
||||
</activation>
|
||||
<build>
|
||||
<plugins>
|
||||
<!-- Unit/Integration Testing setup: This plugin unzips the
|
||||
<!-- Unit Testing setup: This plugin unzips the
|
||||
'testEnvironment.zip' file (created by dspace-parent POM), into
|
||||
the 'target/testing/' folder, to essentially create a test
|
||||
install of DSpace, against which Tests can be run. -->
|
||||
<plugin>
|
||||
<artifactId>maven-dependency-plugin</artifactId>
|
||||
<version>2.8</version>
|
||||
<configuration>
|
||||
<outputDirectory>${project.build.directory}/testing</outputDirectory>
|
||||
<artifactItems>
|
||||
@@ -174,12 +222,65 @@
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>setupTestEnvironment</id>
|
||||
<id>setupUnitTestEnvironment</id>
|
||||
<phase>generate-test-resources</phase>
|
||||
<goals>
|
||||
<goal>unpack</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<!-- Run Unit Testing! This plugin just kicks off the tests. -->
|
||||
<plugin>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<configuration>
|
||||
<systemPropertyVariables>
|
||||
<!-- Specify the dspace.dir to use for test environment -->
|
||||
<!-- ${agnostic.build.dir} is set dynamically by groovy-maven-plugin above -->
|
||||
<!-- This system property is loaded by AbstractDSpaceTest to initialize the test environment -->
|
||||
<dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir>
|
||||
<!-- Turn off any DSpace logging -->
|
||||
<dspace.log.init.disable>true</dspace.log.init.disable>
|
||||
<solr.install.dir>${agnostic.build.dir}/testing/dspace/solr/</solr.install.dir>
|
||||
</systemPropertyVariables>
|
||||
</configuration>
|
||||
</plugin>
|
||||
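AbstractDSpaceTest is the stated consumer of these system properties; a hedged sketch of how a test base class might pick them up (the class below is purely illustrative, not DSpace's actual code):

    public abstract class ExampleTestBase {
        // Set by the surefire <systemPropertyVariables> block above.
        protected static final String DSPACE_DIR =
                System.getProperty("dspace.dir", "target/testing/dspace/");

        // Tests can then resolve files under the unpacked test install, e.g.:
        //   new java.io.File(DSPACE_DIR, "config/dspace.cfg")
    }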
</plugins>
|
||||
</build>
|
||||
</profile>
|
||||
|
||||
<!-- Set up the Integration Test Environment (when -DskipIntegrationTests=false) -->
|
||||
<profile>
|
||||
<id>integration-test-environment</id>
|
||||
<activation>
|
||||
<activeByDefault>false</activeByDefault>
|
||||
<property>
|
||||
<name>skipIntegrationTests</name>
|
||||
<value>false</value>
|
||||
</property>
|
||||
</activation>
|
||||
<build>
|
||||
<plugins>
|
||||
<!-- Integration Testing setup: This plugin unzips the
|
||||
'testEnvironment.zip' file (created by dspace-parent POM), into
|
||||
the 'target/testing/' folder, to essentially create a test
|
||||
install of DSpace, against which Tests can be run. -->
|
||||
<plugin>
|
||||
<artifactId>maven-dependency-plugin</artifactId>
|
||||
<configuration>
|
||||
<outputDirectory>${project.build.directory}/testing</outputDirectory>
|
||||
<artifactItems>
|
||||
<artifactItem>
|
||||
<groupId>org.dspace</groupId>
|
||||
<artifactId>dspace-parent</artifactId>
|
||||
<version>${project.version}</version>
|
||||
<type>zip</type>
|
||||
<classifier>testEnvironment</classifier>
|
||||
</artifactItem>
|
||||
</artifactItems>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>setupIntegrationTestEnvironment</id>
|
||||
<phase>pre-integration-test</phase>
|
||||
@@ -190,167 +291,132 @@
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<!-- This plugin allows us to run a Groovy script in our Maven POM
|
||||
(see: http://gmaven.codehaus.org/Executing+Groovy+Code )
|
||||
We are generating an OS-agnostic version (agnostic.build.dir) of
|
||||
the ${project.build.directory} property (full path of target dir).
|
||||
This is needed by the FileWeaver & Surefire plugins (see below)
|
||||
to initialize the Unit Test environment's dspace.cfg file.
|
||||
Otherwise, the Unit Test Framework will not work on Windows OS.
|
||||
This Groovy code was mostly borrowed from:
|
||||
http://stackoverflow.com/questions/3872355/how-to-convert-file-separator-in-maven
|
||||
-->
|
||||
<plugin>
|
||||
<groupId>org.codehaus.gmaven</groupId>
|
||||
<artifactId>groovy-maven-plugin</artifactId>
|
||||
<version>2.0</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>setproperty</id>
|
||||
<phase>generate-test-resources</phase> <!-- XXX I think this should be 'initialize' - MHW -->
|
||||
<goals>
|
||||
<goal>execute</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<source>
|
||||
project.properties['agnostic.build.dir'] = project.build.directory.replace(File.separator, '/');
|
||||
println("Initializing Maven property 'agnostic.build.dir' to: " + project.properties['agnostic.build.dir']);
|
||||
</source>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<!-- Run Unit Testing! This plugin just kicks off the tests (when enabled). -->
|
||||
<plugin>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<configuration>
|
||||
<systemPropertyVariables>
|
||||
<!-- Specify the dspace.dir to use for test environment -->
|
||||
<!-- This system property is loaded by AbstractDSpaceTest to initialize the test environment -->
|
||||
<dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir>
|
||||
<!-- Turn off any DSpace logging -->
|
||||
<dspace.log.init.disable>true</dspace.log.init.disable>
|
||||
</systemPropertyVariables>
|
||||
</configuration>
|
||||
</plugin>
|
||||
|
||||
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>xml-maven-plugin</artifactId>
|
||||
<version>1.0.1</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>validate-ALL-xml-and-xsl</id>
|
||||
<phase>process-test-resources</phase>
|
||||
<goals>
|
||||
<goal>validate</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
<configuration>
|
||||
<validationSets>
|
||||
<!-- validate ALL XML and XSL config files in the testing folder -->
|
||||
<validationSet>
|
||||
<dir>${agnostic.build.dir}/testing</dir>
|
||||
<includes>
|
||||
<include>**/*.xml</include>
|
||||
<include>**/*.xsl</include>
|
||||
<include>**/*.xconf</include>
|
||||
</includes>
|
||||
</validationSet>
|
||||
<!-- validate ALL XML and XSL files throughout the project -->
|
||||
<validationSet>
|
||||
<dir>${root.basedir}</dir>
|
||||
<includes>
|
||||
<include>**/*.xml</include>
|
||||
<include>**/*.xsl</include>
|
||||
<include>**/*.xmap</include>
|
||||
</includes>
|
||||
</validationSet>
|
||||
</validationSets>
|
||||
</configuration>
|
||||
</plugin>
|
||||
|
||||
|
||||
<!-- Run Integration Testing! This plugin just kicks off the tests (when enabled). -->
|
||||
<!-- Run Integration Testing! This plugin just kicks off the tests. -->
|
||||
<plugin>
|
||||
<artifactId>maven-failsafe-plugin</artifactId>
|
||||
<configuration>
|
||||
<systemPropertyVariables>
|
||||
<!-- Specify the dspace.dir to use for test environment -->
|
||||
<!-- ${agnostic.build.dir} is set dynamically by groovy-maven-plugin above -->
|
||||
<dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir>
|
||||
<!-- Turn off any DSpace logging -->
|
||||
<dspace.log.init.disable>true</dspace.log.init.disable>
|
||||
<solr.install.dir>${agnostic.build.dir}/testing/dspace/solr/</solr.install.dir>
|
||||
</systemPropertyVariables>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
|
||||
</profile>
|
||||
</profiles>
|
||||
|
||||
<dependencies>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.hibernate</groupId>
|
||||
<artifactId>hibernate-core</artifactId>
|
||||
<artifactId>hibernate-ehcache</artifactId>
|
||||
<exclusions>
|
||||
<!-- Newer version pulled in via Jersey below -->
|
||||
<exclusion>
|
||||
<groupId>org.jboss.logging</groupId>
|
||||
<artifactId>jboss-logging</artifactId>
|
||||
<groupId>org.javassist</groupId>
|
||||
<artifactId>javassist</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.hibernate</groupId>
|
||||
<artifactId>hibernate-ehcache</artifactId>
|
||||
<artifactId>hibernate-jpamodelgen</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.hibernate</groupId>
|
||||
<artifactId>hibernate-validator-cdi</artifactId>
|
||||
<version>${hibernate-validator.version}</version>
|
||||
<groupId>org.hibernate.validator</groupId>
|
||||
<artifactId>hibernate-validator-cdi</artifactId>
|
||||
<version>${hibernate-validator.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.hibernate.javax.persistence</groupId>
|
||||
<artifactId>hibernate-jpa-2.1-api</artifactId>
|
||||
<version>1.0.0.Final</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-orm</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.glassfish</groupId>
|
||||
<artifactId>javax.el</artifactId>
|
||||
<version>3.0.1-b10</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.dspace</groupId>
|
||||
<groupId>net.handle</groupId>
|
||||
<artifactId>handle</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.dspace</groupId>
|
||||
<artifactId>jargon</artifactId>
|
||||
<groupId>net.cnri</groupId>
|
||||
<artifactId>cnri-servlet-container</artifactId>
|
||||
<exclusions>
|
||||
<!-- Newer versions provided in our parent POM -->
|
||||
<exclusion>
|
||||
<groupId>org.ow2.asm</groupId>
|
||||
<artifactId>asm-commons</artifactId>
|
||||
</exclusion>
|
||||
<!-- Newer version of Bouncycastle brought in via solr-cell -->
|
||||
<exclusion>
|
||||
<groupId>org.bouncycastle</groupId>
|
||||
<artifactId>bcpkix-jdk15on</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.bouncycastle</groupId>
|
||||
<artifactId>bcprov-jdk15on</artifactId>
|
||||
</exclusion>
|
||||
<!-- Newer version of Jetty in our parent POM & via Solr -->
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-alpn-java-server</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-deploy</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-servlet</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-servlets</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-webapp</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-xml</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty.http2</groupId>
|
||||
<artifactId>http2-common</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty.http2</groupId>
|
||||
<artifactId>http2-server</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<!-- Jetty is needed to run Handle Server -->
|
||||
<dependency>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-server</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.dspace</groupId>
|
||||
<artifactId>mets</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.dspace.dependencies</groupId>
|
||||
<artifactId>dspace-tm-extractors</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.jena</groupId>
|
||||
<artifactId>apache-jena-libs</artifactId>
|
||||
<type>pom</type>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
<groupId>log4j</groupId>
|
||||
<artifactId>log4j</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
@@ -362,10 +428,6 @@
|
||||
<groupId>commons-codec</groupId>
|
||||
<artifactId>commons-codec</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-collections</groupId>
|
||||
<artifactId>commons-collections</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-collections4</artifactId>
|
||||
@@ -384,8 +446,8 @@
|
||||
<artifactId>commons-io</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-lang</groupId>
|
||||
<artifactId>commons-lang</artifactId>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-lang3</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
@@ -401,9 +463,13 @@
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>javax.servlet</groupId>
|
||||
<artifactId>servlet-api</artifactId>
|
||||
<artifactId>javax.servlet-api</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>javax.annotation</groupId>
|
||||
<artifactId>javax.annotation-api</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>jaxen</groupId>
|
||||
<artifactId>jaxen</artifactId>
|
||||
@@ -418,14 +484,6 @@
|
||||
<groupId>org.jdom</groupId>
|
||||
<artifactId>jdom</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>log4j</groupId>
|
||||
<artifactId>log4j</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>oro</groupId>
|
||||
<artifactId>oro</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.pdfbox</groupId>
|
||||
<artifactId>pdfbox</artifactId>
|
||||
@@ -434,30 +492,10 @@
|
||||
<groupId>org.apache.pdfbox</groupId>
|
||||
<artifactId>fontbox</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.bouncycastle</groupId>
|
||||
<artifactId>bcprov-jdk15</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.bouncycastle</groupId>
|
||||
<artifactId>bcmail-jdk15</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.poi</groupId>
|
||||
<artifactId>poi</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.poi</groupId>
|
||||
<artifactId>poi-scratchpad</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>rome</groupId>
|
||||
<artifactId>rome</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>rome</groupId>
|
||||
<artifactId>opensearch</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>xalan</groupId>
|
||||
<artifactId>xalan</artifactId>
|
||||
@@ -465,20 +503,6 @@
|
||||
<dependency>
|
||||
<groupId>xerces</groupId>
|
||||
<artifactId>xercesImpl</artifactId>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>xml-apis</groupId>
|
||||
<artifactId>xml-apis</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>xml-apis</groupId>
|
||||
<artifactId>xml-apis</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>javax.activation</groupId>
|
||||
<artifactId>activation</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.ibm.icu</groupId>
|
||||
@@ -492,11 +516,6 @@
|
||||
<groupId>org.dspace</groupId>
|
||||
<artifactId>dspace-services</artifactId>
|
||||
</dependency>
|
||||
<dependency> <!-- Keep jmockit before junit -->
|
||||
<groupId>org.jmockit</groupId>
|
||||
<artifactId>jmockit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
@@ -504,7 +523,7 @@
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.hamcrest</groupId>
|
||||
<artifactId>hamcrest-core</artifactId>
|
||||
<artifactId>hamcrest-all</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
@@ -514,72 +533,138 @@
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
<artifactId>mockito-inline</artifactId>
|
||||
<scope>test</scope>
|
||||
<exclusions>
|
||||
<!-- Different version provided by hibernate-ehcache -->
|
||||
<exclusion>
|
||||
<groupId>net.bytebuddy</groupId>
|
||||
<artifactId>byte-buddy</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<!-- Used for RSS / ATOM syndication feeds -->
|
||||
<dependency>
|
||||
<groupId>org.rometools</groupId>
|
||||
<artifactId>rome-modules</artifactId>
|
||||
<version>1.0</version>
|
||||
</dependency>
|
||||
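A minimal sketch of feed construction with the ROME core API that these artifacts extend (all feed values are placeholders):

    import com.sun.syndication.feed.synd.SyndFeed;
    import com.sun.syndication.feed.synd.SyndFeedImpl;
    import com.sun.syndication.io.FeedException;
    import com.sun.syndication.io.SyndFeedOutput;

    public class FeedExample {
        public static void main(String[] args) throws FeedException {
            SyndFeed feed = new SyndFeedImpl();
            feed.setFeedType("rss_2.0");          // or "atom_1.0"
            feed.setTitle("Example repository feed");
            feed.setLink("https://example.org");
            feed.setDescription("Recent submissions");
            // Serialize the (empty) feed to XML.
            System.out.println(new SyndFeedOutput().outputString(feed));
        }
    }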
<dependency>
|
||||
<groupId>gr.ekt.bte</groupId>
|
||||
<artifactId>bte-core</artifactId>
|
||||
<version>0.9.3.5</version>
|
||||
<exclusions>
|
||||
<!-- A more recent version is retrieved from another dependency -->
|
||||
<exclusion>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>gr.ekt.bte</groupId>
|
||||
<artifactId>bte-io</artifactId>
|
||||
<version>0.9.3.5</version>
|
||||
<exclusions>
|
||||
<!-- A more recent version is retrieved from another dependency -->
|
||||
<exclusion>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-lang3</artifactId>
|
||||
</exclusion>
|
||||
<!-- A more recent version is retrieved from another dependency -->
|
||||
<exclusion>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpcore</artifactId>
|
||||
<groupId>org.jbibtex</groupId>
|
||||
<artifactId>jbibtex</artifactId>
|
||||
<version>1.0.10</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpclient</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.solr</groupId>
|
||||
<artifactId>solr-solrj</artifactId>
|
||||
<version>${solr.version}</version>
|
||||
<version>${solr.client.version}</version>
|
||||
<exclusions>
|
||||
<!-- Newer Jetty version brought in via Parent POM -->
|
||||
<exclusion>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>jcl-over-slf4j</artifactId>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-http</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-io</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-util</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<!-- Solr Core is needed for Integration Tests (to run a MockSolrServer) -->
|
||||
<!-- The following Solr / Lucene dependencies also support integration tests -->
|
||||
<dependency>
|
||||
<groupId>commons-configuration</groupId>
|
||||
<artifactId>commons-configuration</artifactId>
|
||||
<groupId>org.apache.solr</groupId>
|
||||
<artifactId>solr-core</artifactId>
|
||||
<scope>test</scope>
|
||||
<version>${solr.client.version}</version>
|
||||
<exclusions>
|
||||
<!-- Newer version brought in by opencsv -->
|
||||
<exclusion>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-text</artifactId>
|
||||
</exclusion>
|
||||
<!-- Newer Jetty version brought in via Parent POM -->
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-http</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-io</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-util</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
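The MockSolrServer mentioned above is not shown in this diff; as a rough, assumption-laden sketch, an embedded core built on solr-core looks something like this (the core name and Solr home path are guesses):

    import java.nio.file.Paths;
    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;

    public class EmbeddedSolrExample {
        public static void main(String[] args) throws Exception {
            // Assumes a Solr home containing a core named "search".
            EmbeddedSolrServer solr = new EmbeddedSolrServer(
                    Paths.get("target/testing/dspace/solr"), "search");
            // Match-all query against the embedded core.
            long found = solr.query("search", new SolrQuery("*:*"))
                    .getResults().getNumFound();
            System.out.println(found);
            solr.close();
        }
    }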
<dependency>
|
||||
<groupId>org.apache.solr</groupId>
|
||||
<artifactId>solr-cell</artifactId>
|
||||
<exclusions>
|
||||
<!-- Newer version brought in by opencsv -->
|
||||
<exclusion>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-text</artifactId>
|
||||
</exclusion>
|
||||
<!-- Newer Jetty version brought in via Parent POM -->
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-http</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-io</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-util</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.lucene</groupId>
|
||||
<artifactId>lucene-core</artifactId>
|
||||
</dependency>
|
||||
<!-- Used for full-text indexing with Solr -->
|
||||
<dependency>
|
||||
<groupId>org.apache.tika</groupId>
|
||||
<artifactId>tika-parsers</artifactId>
|
||||
</dependency>
|
||||
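A minimal sketch of the kind of Tika text extraction that backs full-text indexing (the input file name is illustrative):

    import java.io.FileInputStream;
    import java.io.InputStream;
    import org.apache.tika.metadata.Metadata;
    import org.apache.tika.parser.AutoDetectParser;
    import org.apache.tika.parser.ParseContext;
    import org.apache.tika.sax.BodyContentHandler;

    public class TikaExtractExample {
        public static void main(String[] args) throws Exception {
            try (InputStream in = new FileInputStream("document.pdf")) {
                BodyContentHandler handler = new BodyContentHandler(-1); // no size limit
                Metadata metadata = new Metadata();
                // Detects the format, then extracts plain text plus metadata.
                new AutoDetectParser().parse(in, handler, metadata, new ParseContext());
                System.out.println(handler.toString().length());
            }
        }
    }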
<dependency>
|
||||
<groupId>org.apache.lucene</groupId>
|
||||
<artifactId>lucene-analyzers-icu</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.lucene</groupId>
|
||||
<artifactId>lucene-analyzers-smartcn</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.lucene</groupId>
|
||||
<artifactId>lucene-analyzers-stempel</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.xmlbeans</groupId>
|
||||
<artifactId>xmlbeans</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.maxmind.geoip2</groupId>
|
||||
<artifactId>geoip2</artifactId>
|
||||
@@ -595,12 +680,6 @@
|
||||
<version>2.1.7</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.lucene</groupId>
|
||||
<artifactId>lucene-core</artifactId>
|
||||
<version>4.10.4</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.coverity.security</groupId>
|
||||
<artifactId>coverity-escapers</artifactId>
|
||||
@@ -617,20 +696,13 @@
|
||||
<dependency>
|
||||
<groupId>com.google.guava</groupId>
|
||||
<artifactId>guava</artifactId>
|
||||
<version>19.0</version>
|
||||
</dependency>
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>org.postgresql</groupId>
|
||||
<artifactId>postgresql</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>jdbm</groupId>
|
||||
<artifactId>jdbm</artifactId>
|
||||
@@ -648,7 +720,7 @@
|
||||
<dependency>
|
||||
<groupId>org.flywaydb</groupId>
|
||||
<artifactId>flyway-core</artifactId>
|
||||
<version>4.0.3</version>
|
||||
<version>6.5.5</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Google Analytics -->
|
||||
@@ -672,6 +744,7 @@
|
||||
<groupId>com.google.oauth-client</groupId>
|
||||
<artifactId>google-oauth-client</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!-- FindBugs -->
|
||||
<dependency>
|
||||
<groupId>com.google.code.findbugs</groupId>
|
||||
@@ -681,6 +754,7 @@
|
||||
<groupId>com.google.code.findbugs</groupId>
|
||||
<artifactId>annotations</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>joda-time</groupId>
|
||||
<artifactId>joda-time</artifactId>
|
||||
@@ -692,49 +766,137 @@
|
||||
<type>jar</type>
|
||||
</dependency>
|
||||
|
||||
<!-- JAXB API and implementation (no longer bundled as of Java 11) -->
|
||||
<dependency>
|
||||
<groupId>javax.xml.bind</groupId>
|
||||
<artifactId>jaxb-api</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.glassfish.jaxb</groupId>
|
||||
<artifactId>jaxb-runtime</artifactId>
|
||||
</dependency>
|
||||
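To illustrate why both artifacts are needed on Java 11+, a minimal JAXB marshalling sketch; the annotated class is invented for the example, compiles against jaxb-api, and needs jaxb-runtime on the classpath at run time:

    import java.io.StringWriter;
    import javax.xml.bind.JAXBContext;
    import javax.xml.bind.Marshaller;
    import javax.xml.bind.annotation.XmlRootElement;

    public class JaxbExample {
        @XmlRootElement
        public static class Note {
            public String text = "hello";
        }

        public static void main(String[] args) throws Exception {
            Marshaller m = JAXBContext.newInstance(Note.class).createMarshaller();
            m.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
            StringWriter out = new StringWriter();
            m.marshal(new Note(), out);   // fails at runtime without jaxb-runtime
            System.out.println(out);
        }
    }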
|
||||
<!-- Apache Axiom -->
|
||||
<dependency>
|
||||
<groupId>org.apache.ws.commons.axiom</groupId>
|
||||
<artifactId>axiom-impl</artifactId>
|
||||
<!-- NOTE: SWORDv2 needs 1.2.14, required by Abdera: https://abdera.apache.org/ -->
|
||||
<version>1.2.14</version>
|
||||
<version>${axiom.version}</version>
|
||||
<exclusions>
|
||||
<!-- Exclude Geronimo as it is NOT necessary when using javax.activation (which we use)
|
||||
See: https://ws.apache.org/axiom/userguide/ch04.html#d0e732 -->
|
||||
<exclusion>
|
||||
<groupId>org.apache.geronimo.specs</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<!-- Exclude Woodstox, as later version provided by Solr dependencies -->
|
||||
<exclusion>
|
||||
<groupId>org.codehaus.woodstox</groupId>
|
||||
<artifactId>woodstox-core-asl</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.ws.commons.axiom</groupId>
|
||||
<artifactId>axiom-api</artifactId>
|
||||
<!-- NOTE: SWORDv2 needs 1.2.14, required by Abdera: https://abdera.apache.org/ -->
|
||||
<version>1.2.14</version>
|
||||
<version>${axiom.version}</version>
|
||||
<exclusions>
|
||||
<!-- Exclude Geronimo as it is NOT necessary when using javax.activation (which we use)
|
||||
See: https://ws.apache.org/axiom/userguide/ch04.html#d0e732 -->
|
||||
<exclusion>
|
||||
<groupId>org.apache.geronimo.specs</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<!-- Exclude Woodstox, as later version provided by Solr dependencies -->
|
||||
<exclusion>
|
||||
<groupId>org.codehaus.woodstox</groupId>
|
||||
<artifactId>woodstox-core-asl</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<!-- Jersey / JAX-RS client (javax.ws.rs.*) dependencies needed to integrate with external sources/services -->
|
||||
<dependency>
|
||||
<groupId>org.glassfish.jersey.core</groupId>
|
||||
<artifactId>jersey-client</artifactId>
|
||||
<version>${jersey.version}</version>
|
||||
</dependency>
|
||||
<!-- Required because Jersey no longer includes a dependency injection provider by default.
|
||||
Needed to support PubMed API call in "PubmedImportMetadataSourceServiceImpl.GetRecord" -->
|
||||
<dependency>
|
||||
<groupId>org.glassfish.jersey.inject</groupId>
|
||||
<artifactId>jersey-hk2</artifactId>
|
||||
<version>${jersey.version}</version>
|
||||
</dependency>
|
||||
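The PubMed call itself is not part of this diff; a minimal, non-authoritative Jersey client sketch of the style of request these two artifacts enable (the endpoint and parameters are illustrative):

    import javax.ws.rs.client.Client;
    import javax.ws.rs.client.ClientBuilder;

    public class JerseyClientExample {
        public static void main(String[] args) {
            Client client = ClientBuilder.newClient();
            // Illustrative endpoint; the real service URL lives in DSpace config.
            String response = client
                    .target("https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi")
                    .queryParam("db", "pubmed")
                    .queryParam("id", "12345")
                    .request()
                    .get(String.class);
            System.out.println(response.length());
            client.close();
        }
    }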
|
||||
<!-- S3 -->
|
||||
<dependency>
|
||||
<groupId>com.amazonaws</groupId>
|
||||
<artifactId>aws-java-sdk-s3</artifactId>
|
||||
<version>1.10.50</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.orcid</groupId>
|
||||
<artifactId>orcid-model</artifactId>
|
||||
<version>3.0.2</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>joda-time</groupId>
|
||||
<artifactId>joda-time</artifactId>
|
||||
<groupId>javax.validation</groupId>
|
||||
<artifactId>validation-api</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
<groupId>com.fasterxml.jackson.jaxrs</groupId>
|
||||
<artifactId>jackson-jaxrs-json-provider</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.yaml</groupId>
|
||||
<artifactId>snakeyaml</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.javassist</groupId>
|
||||
<artifactId>javassist</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>io.swagger</groupId>
|
||||
<artifactId>swagger-jersey-jaxrs</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<!-- S3 also wanted jackson... -->
|
||||
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
<groupId>org.json</groupId>
|
||||
<artifactId>json</artifactId>
|
||||
<version>20180130</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Used for Solr core export/import -->
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
<groupId>com.opencsv</groupId>
|
||||
<artifactId>opencsv</artifactId>
|
||||
<version>5.2</version>
|
||||
</dependency>
|
||||
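A minimal opencsv sketch of row-oriented export in the spirit of the comment above (the file name and columns are made up):

    import java.io.FileWriter;
    import com.opencsv.CSVWriter;

    public class CsvExportExample {
        public static void main(String[] args) throws Exception {
            try (CSVWriter writer = new CSVWriter(new FileWriter("core-export.csv"))) {
                writer.writeNext(new String[] {"id", "handle"});   // header row
                writer.writeNext(new String[] {"1", "123456789/1"});
            }
        }
    }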
|
||||
<!-- Email templating -->
|
||||
<dependency>
|
||||
<groupId>org.apache.velocity</groupId>
|
||||
<artifactId>velocity-engine-core</artifactId>
|
||||
<version>2.0</version>
|
||||
<type>jar</type>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.xmlunit</groupId>
|
||||
<artifactId>xmlunit-core</artifactId>
|
||||
<version>2.6.3</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.bcel</groupId>
|
||||
<artifactId>bcel</artifactId>
|
||||
<version>6.4.0</version>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
</project>
|
||||
|
@@ -1,163 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.solr.handler.extraction;
|
||||
|
||||
|
||||
/**
|
||||
* The various Solr parameter names to use when extracting content.
|
||||
**/
|
||||
public interface ExtractingParams {
|
||||
|
||||
/**
|
||||
* Map all generated attribute names to field names with lowercase and underscores.
|
||||
*/
|
||||
public static final String LOWERNAMES = "lowernames";
|
||||
|
||||
/**
|
||||
* If true, ignore TikaException (skip text extraction but still index the metadata).
|
||||
*/
|
||||
public static final String IGNORE_TIKA_EXCEPTION = "ignoreTikaException";
|
||||
|
||||
|
||||
/**
|
||||
* The param prefix for mapping Tika metadata to Solr fields.
|
||||
* <p>
|
||||
* To map a field, add a name like:
|
||||
* <pre>fmap.title=solr.title</pre>
|
||||
*
|
||||
* In this example, the Tika "title" metadata value will be added to a Solr field named "solr.title"
|
||||
*/
|
||||
public static final String MAP_PREFIX = "fmap.";
|
||||
|
||||
/**
|
||||
* The boost value for the name of the field. The boost can be specified by a name mapping.
|
||||
* <p>
|
||||
* For example
|
||||
* <pre>
|
||||
* map.title=solr.title
|
||||
* boost.solr.title=2.5
|
||||
* </pre>
|
||||
* will boost the solr.title field for this document by 2.5
|
||||
*/
|
||||
public static final String BOOST_PREFIX = "boost.";
|
||||
|
||||
/**
|
||||
* Pass in literal values to be added to the document, as in
|
||||
* <pre>
|
||||
* literal.myField=Foo
|
||||
* </pre>
|
||||
*/
|
||||
public static final String LITERALS_PREFIX = "literal.";
|
||||
|
||||
|
||||
/**
|
||||
* Restrict the extracted parts of a document to be indexed
|
||||
* by passing in an XPath expression. All content that satisfies the XPath expr.
|
||||
* will be passed to the {@link org.apache.solr.handler.extraction.SolrContentHandler}.
|
||||
* <p>
|
||||
* See Tika's docs for what the extracted document looks like.
|
||||
*
|
||||
* @see #CAPTURE_ELEMENTS
|
||||
*/
|
||||
public static final String XPATH_EXPRESSION = "xpath";
|
||||
|
||||
|
||||
/**
|
||||
* Only extract and return the content, do not index it.
|
||||
*/
|
||||
public static final String EXTRACT_ONLY = "extractOnly";
|
||||
|
||||
/**
|
||||
* Content output format if extractOnly is true. Default is "xml", alternative is "text".
|
||||
*/
|
||||
public static final String EXTRACT_FORMAT = "extractFormat";
|
||||
|
||||
/**
|
||||
* Capture attributes separately according to the name of the element, instead of just adding them to the string
|
||||
* buffer.
|
||||
*/
|
||||
public static final String CAPTURE_ATTRIBUTES = "captureAttr";
|
||||
|
||||
/**
|
||||
* Literal field values will by default override other values such as metadata and content. Set this to false to
|
||||
* revert to pre-4.0 behaviour
|
||||
*/
|
||||
public static final String LITERALS_OVERRIDE = "literalsOverride";
|
||||
|
||||
/**
|
||||
* Capture the specified fields (and everything included below them that isn't captured by some other capture field)
|
||||
* separately from the default. This is different
|
||||
* from the case of passing in an XPath expression.
|
||||
* <p>
|
||||
* The Capture field is based on the localName returned to the
|
||||
* {@link org.apache.solr.handler.extraction.SolrContentHandler}
|
||||
* by Tika, not to be confused with the mapped field. The field name can then
|
||||
* be mapped into the index schema.
|
||||
* <p>
|
||||
* For instance, a Tika document may look like:
|
||||
* <pre>
|
||||
* <html>
|
||||
* ...
|
||||
* <body>
|
||||
* <p>some text here. <div>more text</div></p>
|
||||
* Some more text
|
||||
* </body>
|
||||
* </pre>
|
||||
* By passing in the p tag, you could capture all P tags separately from the rest of the text.
|
||||
* Thus, in the example, the capture of the P tag would be: "some text here. more text"
|
||||
*/
|
||||
public static final String CAPTURE_ELEMENTS = "capture";
|
||||
|
||||
/**
|
||||
* The type of the stream. If not specified, Tika will use MIME type detection.
|
||||
*/
|
||||
public static final String STREAM_TYPE = "stream.type";
|
||||
|
||||
|
||||
/**
|
||||
* Optional. The file name. If specified, Tika can take this into account while
|
||||
* guessing the MIME type.
|
||||
*/
|
||||
public static final String RESOURCE_NAME = "resource.name";
|
||||
|
||||
/**
|
||||
* Optional. The password for this resource. Will be used instead of the rule-based password lookup mechanisms.
|
||||
*/
|
||||
public static final String RESOURCE_PASSWORD = "resource.password";
|
||||
|
||||
/**
|
||||
* Optional. If specified, the prefix will be prepended to all Metadata, such that it would be possible
|
||||
* to set up a dynamic field to automatically capture it
|
||||
*/
|
||||
public static final String UNKNOWN_FIELD_PREFIX = "uprefix";
|
||||
|
||||
/**
|
||||
* Optional. If specified and the name of a potential field cannot be determined, the default Field specified
|
||||
* will be used instead.
|
||||
*/
|
||||
public static final String DEFAULT_FIELD = "defaultField";
|
||||
|
||||
/**
|
||||
* Optional. If specified, loads the file as a source for password lookups for Tika encrypted documents.
|
||||
* <p>
|
||||
* File format is Java properties format with one key=value per line.
|
||||
* The key is evaluated as a regex against the file name, and the value is the password
|
||||
* The rules are evaluated top to bottom, i.e. the first match will be used.
|
||||
* If you want a fallback password to be always used, supply a .*=<defaultmypassword> at the end
|
||||
*/
|
||||
public static final String PASSWORD_MAP_FILE = "passwordsFile";
|
||||
}
|
@@ -14,10 +14,10 @@ import java.util.UUID;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.apache.commons.collections.CollectionUtils;
|
||||
import org.apache.commons.collections4.CollectionUtils;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Community;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
@@ -51,7 +51,7 @@ public class CommunityFiliator {
|
||||
*/
|
||||
public static void main(String[] argv) throws Exception {
|
||||
// create an options object and populate it
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
|
||||
Options options = new Options();
|
||||
|
||||
@@ -180,13 +180,9 @@ public class CommunityFiliator {
|
||||
// second test - circularity: parent's parents can't include proposed
|
||||
// child
|
||||
List<Community> parentDads = parent.getParentCommunities();
|
||||
|
||||
for (int i = 0; i < parentDads.size(); i++) {
|
||||
if (parentDads.get(i).getID().equals(child.getID())) {
|
||||
System.out
|
||||
.println("Error, circular parentage - child is parent of parent");
|
||||
System.exit(1);
|
||||
}
|
||||
if (parentDads.contains(child)) {
|
||||
System.out.println("Error, circular parentage - child is parent of parent");
|
||||
System.exit(1);
|
||||
}
|
||||
|
||||
// everything's OK
|
||||
@@ -210,26 +206,15 @@ public class CommunityFiliator {
|
||||
throws SQLException, AuthorizeException, IOException {
|
||||
// verify that child is indeed a child of parent
|
||||
List<Community> parentKids = parent.getSubcommunities();
|
||||
boolean isChild = false;
|
||||
|
||||
for (int i = 0; i < parentKids.size(); i++) {
|
||||
if (parentKids.get(i).getID().equals(child.getID())) {
|
||||
isChild = true;
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!isChild) {
|
||||
System.out
|
||||
.println("Error, child community not a child of parent community");
|
||||
if (!parentKids.contains(child)) {
|
||||
System.out.println("Error, child community not a child of parent community");
|
||||
System.exit(1);
|
||||
}
|
||||
|
||||
// OK remove the mappings - but leave the community, which will become
|
||||
// top-level
|
||||
child.getParentCommunities().remove(parent);
|
||||
parent.getSubcommunities().remove(child);
|
||||
child.removeParentCommunity(parent);
|
||||
parent.removeSubCommunity(child);
|
||||
communityService.update(c, child);
|
||||
communityService.update(c, parent);
|
||||
|
||||
|
@@ -13,10 +13,9 @@ import java.util.Locale;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.I18nUtil;
|
||||
import org.dspace.eperson.EPerson;
|
||||
@@ -24,6 +23,8 @@ import org.dspace.eperson.Group;
|
||||
import org.dspace.eperson.factory.EPersonServiceFactory;
|
||||
import org.dspace.eperson.service.EPersonService;
|
||||
import org.dspace.eperson.service.GroupService;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* A command-line tool for creating an initial administrator for setting up a
|
||||
@@ -61,7 +62,7 @@ public final class CreateAdministrator {
|
||||
*/
|
||||
public static void main(String[] argv)
|
||||
throws Exception {
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
Options options = new Options();
|
||||
|
||||
CreateAdministrator ca = new CreateAdministrator();
|
||||
@@ -115,7 +116,7 @@ public final class CreateAdministrator {
|
||||
String lastName = null;
|
||||
char[] password1 = null;
|
||||
char[] password2 = null;
|
||||
String language = I18nUtil.DEFAULTLOCALE.getLanguage();
|
||||
String language = I18nUtil.getDefaultLocale().getLanguage();
|
||||
|
||||
while (!dataOK) {
|
||||
System.out.print("E-mail address: ");
|
||||
@@ -147,9 +148,10 @@ public final class CreateAdministrator {
|
||||
lastName = lastName.trim();
|
||||
}
|
||||
|
||||
if (ConfigurationManager.getProperty("webui.supported.locales") != null) {
|
||||
System.out.println("Select one of the following languages: " + ConfigurationManager
|
||||
.getProperty("webui.supported.locales"));
|
||||
ConfigurationService cfg = DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
if (cfg.hasProperty("webui.supported.locales")) {
|
||||
System.out.println("Select one of the following languages: "
|
||||
+ cfg.getProperty("webui.supported.locales"));
|
||||
System.out.print("Language: ");
|
||||
System.out.flush();
|
||||
|
||||
|
@@ -10,6 +10,7 @@ package org.dspace.administer;
|
||||
import java.io.BufferedWriter;
|
||||
import java.io.FileWriter;
|
||||
import java.io.IOException;
|
||||
import java.io.Writer;
|
||||
import java.sql.SQLException;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
@@ -17,25 +18,28 @@ import java.util.Map;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.apache.xml.serialize.Method;
|
||||
import org.apache.xml.serialize.OutputFormat;
|
||||
import org.apache.xml.serialize.XMLSerializer;
|
||||
import org.dspace.content.MetadataField;
|
||||
import org.dspace.content.MetadataSchema;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.MetadataFieldService;
|
||||
import org.dspace.content.service.MetadataSchemaService;
|
||||
import org.dspace.core.Context;
|
||||
import org.xml.sax.SAXException;
|
||||
import org.w3c.dom.DOMConfiguration;
|
||||
import org.w3c.dom.Document;
|
||||
import org.w3c.dom.Element;
|
||||
import org.w3c.dom.bootstrap.DOMImplementationRegistry;
|
||||
import org.w3c.dom.ls.DOMImplementationLS;
|
||||
import org.w3c.dom.ls.LSOutput;
|
||||
import org.w3c.dom.ls.LSSerializer;
|
||||
|
||||
|
||||
/**
|
||||
* @author Graham Triggs
|
||||
*
|
||||
* This class creates an xml document as passed in the arguments and
|
||||
* This class creates an XML document as passed in the arguments and
|
||||
* from the metadata schemas for the repository.
|
||||
*
|
||||
* The form of the XML is as follows
|
||||
@@ -61,17 +65,20 @@ public class MetadataExporter {
|
||||
private MetadataExporter() { }
|
||||
|
||||
/**
|
||||
* @param args commandline arguments
|
||||
* @param args command line arguments
|
||||
* @throws ParseException if parser error
|
||||
* @throws SAXException if XML parse error
|
||||
* @throws IOException if IO error
|
||||
* @throws SQLException if database error
|
||||
* @throws RegistryExportException if export error
|
||||
* @throws ClassNotFoundException if no suitable DOM implementation
|
||||
* @throws InstantiationException if no suitable DOM implementation
|
||||
* @throws IllegalAccessException if no suitable DOM implementation
|
||||
*/
|
||||
public static void main(String[] args)
|
||||
throws ParseException, SQLException, IOException, SAXException, RegistryExportException {
|
||||
throws ParseException, SQLException, IOException, RegistryExportException,
|
||||
ClassNotFoundException, InstantiationException, IllegalAccessException {
|
||||
// create an options object and populate it
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
Options options = new Options();
|
||||
options.addOption("f", "file", true, "output xml file for registry");
|
||||
options.addOption("s", "schema", true, "the name of the schema to export");
|
||||
@@ -95,32 +102,31 @@ public class MetadataExporter {
|
||||
}
|
||||
|
||||
/**
|
||||
* Save a registry to a filepath
|
||||
* Save a registry to a file path
|
||||
*
|
||||
* @param file filepath
|
||||
* @param file file path
|
||||
* @param schema schema definition to save
|
||||
* @throws SQLException if database error
|
||||
* @throws IOException if IO error
|
||||
* @throws SAXException if XML error
|
||||
* @throws RegistryExportException if export error
|
||||
* @throws ClassNotFoundException if no suitable DOM implementation
|
||||
* @throws InstantiationException if no suitable DOM implementation
|
||||
* @throws IllegalAccessException if no suitable DOM implementation
|
||||
*/
|
||||
public static void saveRegistry(String file, String schema)
|
||||
throws SQLException, IOException, SAXException, RegistryExportException {
|
||||
throws SQLException, IOException, RegistryExportException,
|
||||
ClassNotFoundException, InstantiationException, IllegalAccessException {
|
||||
// create a context
|
||||
Context context = new Context();
|
||||
context.turnOffAuthorisationSystem();
|
||||
|
||||
OutputFormat xmlFormat = new OutputFormat(Method.XML, "UTF-8", true);
|
||||
xmlFormat.setLineWidth(120);
|
||||
xmlFormat.setIndent(4);
|
||||
|
||||
XMLSerializer xmlSerializer = new XMLSerializer(new BufferedWriter(new FileWriter(file)), xmlFormat);
|
||||
// XMLSerializer xmlSerializer = new XMLSerializer(System.out, xmlFormat);
|
||||
xmlSerializer.startDocument();
|
||||
xmlSerializer.startElement("dspace-dc-types", null);
|
||||
// Initialize an XML document.
|
||||
Document document = DOMImplementationRegistry.newInstance()
|
||||
.getDOMImplementation("XML 3.0")
|
||||
.createDocument(null, "dspace-dc-types", null);
|
||||
|
||||
// Save the schema definition(s)
|
||||
saveSchema(context, xmlSerializer, schema);
|
||||
saveSchema(context, document, schema);
|
||||
|
||||
List<MetadataField> mdFields = null;
|
||||
|
||||
@@ -139,55 +145,64 @@ public class MetadataExporter {
|
||||
mdFields = metadataFieldService.findAll(context);
|
||||
}
|
||||
|
||||
// Output the metadata fields
|
||||
// Compose the metadata fields
|
||||
for (MetadataField mdField : mdFields) {
|
||||
saveType(context, xmlSerializer, mdField);
|
||||
saveType(context, document, mdField);
|
||||
}
|
||||
|
||||
xmlSerializer.endElement("dspace-dc-types");
|
||||
xmlSerializer.endDocument();
|
||||
// Serialize the completed document to the output file.
|
||||
try (Writer writer = new BufferedWriter(new FileWriter(file))) {
|
||||
DOMImplementationLS lsImplementation
|
||||
= (DOMImplementationLS) DOMImplementationRegistry.newInstance()
|
||||
.getDOMImplementation("LS");
|
||||
LSSerializer serializer = lsImplementation.createLSSerializer();
|
||||
DOMConfiguration configuration = serializer.getDomConfig();
|
||||
configuration.setParameter("format-pretty-print", true);
|
||||
LSOutput lsOutput = lsImplementation.createLSOutput();
|
||||
lsOutput.setEncoding("UTF-8");
|
||||
lsOutput.setCharacterStream(writer);
|
||||
serializer.write(document, lsOutput);
|
||||
}
|
||||
|
||||
// abort the context, as we shouldn't have changed it!!
|
||||
context.abort();
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize the schema registry. If the parameter 'schema' is null or empty, save all schemas
|
||||
* Compose the schema registry. If the parameter 'schema' is null or empty, save all schemas.
|
||||
*
|
||||
* @param context DSpace Context
|
||||
* @param xmlSerializer XML serializer
|
||||
* @param document the document being built
|
||||
* @param schema schema (may be null to save all)
|
||||
* @throws SQLException if database error
|
||||
* @throws SAXException if XML error
|
||||
* @throws RegistryExportException if export error
|
||||
*/
|
||||
public static void saveSchema(Context context, XMLSerializer xmlSerializer, String schema)
|
||||
throws SQLException, SAXException, RegistryExportException {
|
||||
public static void saveSchema(Context context, Document document, String schema)
|
||||
throws SQLException, RegistryExportException {
|
||||
if (schema != null && !"".equals(schema)) {
|
||||
// Find a single named schema
|
||||
MetadataSchema mdSchema = metadataSchemaService.find(context, schema);
|
||||
|
||||
saveSchema(xmlSerializer, mdSchema);
|
||||
saveSchema(document, mdSchema);
|
||||
} else {
|
||||
// Find all schemas
|
||||
List<MetadataSchema> mdSchemas = metadataSchemaService.findAll(context);
|
||||
|
||||
for (MetadataSchema mdSchema : mdSchemas) {
|
||||
saveSchema(xmlSerializer, mdSchema);
|
||||
saveSchema(document, mdSchema);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize a single schema (namespace) registry entry
|
||||
* Compose a single schema (namespace) registry entry
|
||||
*
|
||||
* @param xmlSerializer XML serializer
|
||||
* @param mdSchema DSpace metadata schema
|
||||
* @throws SAXException if XML error
|
||||
* @param document the output document being built.
|
||||
* @param mdSchema DSpace metadata schema
|
||||
* @throws RegistryExportException if export error
|
||||
*/
|
||||
private static void saveSchema(XMLSerializer xmlSerializer, MetadataSchema mdSchema)
|
||||
throws SAXException, RegistryExportException {
|
||||
private static void saveSchema(Document document, MetadataSchema mdSchema)
|
||||
throws RegistryExportException {
|
||||
// If we haven't got a schema, it's an error
|
||||
if (mdSchema == null) {
|
||||
throw new RegistryExportException("no schema to export");
|
||||
@@ -206,35 +221,34 @@ public class MetadataExporter {
|
||||
return;
|
||||
}
|
||||
|
||||
// Output the parent tag
|
||||
xmlSerializer.startElement("dc-schema", null);
|
||||
Element document_element = document.getDocumentElement();
|
||||
|
||||
// Output the schema name
|
||||
xmlSerializer.startElement("name", null);
|
||||
xmlSerializer.characters(name.toCharArray(), 0, name.length());
|
||||
xmlSerializer.endElement("name");
|
||||
// Compose the parent tag
|
||||
Element schema_element = document.createElement("dc-schema");
|
||||
document_element.appendChild(schema_element);
|
||||
|
||||
// Output the schema namespace
|
||||
xmlSerializer.startElement("namespace", null);
|
||||
xmlSerializer.characters(namespace.toCharArray(), 0, namespace.length());
|
||||
xmlSerializer.endElement("namespace");
|
||||
// Compose the schema name
|
||||
Element name_element = document.createElement("name");
|
||||
schema_element.appendChild(name_element);
|
||||
name_element.setTextContent(name);
|
||||
|
||||
xmlSerializer.endElement("dc-schema");
|
||||
// Compose the schema namespace
|
||||
Element namespace_element = document.createElement("namespace");
|
||||
schema_element.appendChild(namespace_element);
|
||||
namespace_element.setTextContent(namespace);
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize a single metadata field registry entry to xml
|
||||
* Compose a single metadata field registry entry to XML.
|
||||
*
|
||||
* @param context DSpace context
|
||||
* @param xmlSerializer xml serializer
|
||||
* @param document the output document being built.
|
||||
* @param mdField DSpace metadata field
|
||||
* @throws SAXException if XML error
|
||||
* @throws RegistryExportException if export error
|
||||
* @throws SQLException if database error
|
||||
* @throws IOException if IO error
|
||||
*/
|
||||
private static void saveType(Context context, XMLSerializer xmlSerializer, MetadataField mdField)
|
||||
throws SAXException, RegistryExportException, SQLException, IOException {
|
||||
private static void saveType(Context context, Document document, MetadataField mdField)
|
||||
throws RegistryExportException, SQLException {
|
||||
// If we haven't been given a field, it's an error
|
||||
if (mdField == null) {
|
||||
throw new RegistryExportException("no field to export");
|
||||
@@ -251,38 +265,39 @@ public class MetadataExporter {
|
||||
throw new RegistryExportException("incomplete field information");
|
||||
}
|
||||
|
||||
// Output the parent tag
|
||||
xmlSerializer.startElement("dc-type", null);
|
||||
Element document_element = document.getDocumentElement();
|
||||
|
||||
// Output the schema name
|
||||
xmlSerializer.startElement("schema", null);
|
||||
xmlSerializer.characters(schemaName.toCharArray(), 0, schemaName.length());
|
||||
xmlSerializer.endElement("schema");
|
||||
// Compose the parent tag
|
||||
Element dc_type = document.createElement("dc-type");
|
||||
document_element.appendChild(dc_type);
|
||||
|
||||
// Output the element
|
||||
xmlSerializer.startElement("element", null);
|
||||
xmlSerializer.characters(element.toCharArray(), 0, element.length());
|
||||
xmlSerializer.endElement("element");
|
||||
// Compose the schema name
|
||||
Element schema_element = document.createElement("schema");
|
||||
dc_type.appendChild(schema_element);
|
||||
schema_element.setTextContent(schemaName);
|
||||
|
||||
// Output the qualifier, if present
|
||||
// Compose the element
|
||||
Element element_element = document.createElement("element");
|
||||
dc_type.appendChild(element_element);
|
||||
element_element.setTextContent(element);
|
||||
|
||||
// Compose the qualifier, if present
|
||||
if (qualifier != null) {
|
||||
xmlSerializer.startElement("qualifier", null);
|
||||
xmlSerializer.characters(qualifier.toCharArray(), 0, qualifier.length());
|
||||
xmlSerializer.endElement("qualifier");
|
||||
Element qualifier_element = document.createElement("qualifier");
|
||||
dc_type.appendChild(qualifier_element);
|
||||
qualifier_element.setTextContent(qualifier);
|
||||
} else {
|
||||
xmlSerializer.comment("unqualified");
|
||||
dc_type.appendChild(document.createComment("unqualified"));
|
||||
}
|
||||
|
||||
// Output the scope note, if present
|
||||
// Compose the scope note, if present
|
||||
if (scopeNote != null) {
|
||||
xmlSerializer.startElement("scope_note", null);
|
||||
xmlSerializer.characters(scopeNote.toCharArray(), 0, scopeNote.length());
|
||||
xmlSerializer.endElement("scope_note");
|
||||
Element scope_element = document.createElement("scope_note");
|
||||
dc_type.appendChild(scope_element);
|
||||
scope_element.setTextContent(scopeNote);
|
||||
} else {
|
||||
xmlSerializer.comment("no scope note");
|
||||
dc_type.appendChild(document.createComment("no scope note"));
|
||||
}
|
||||
|
||||
xmlSerializer.endElement("dc-type");
|
||||
}
|
||||
|
||||
static Map<Integer, String> schemaMap = new HashMap<Integer, String>();
|
||||
@@ -317,7 +332,7 @@ public class MetadataExporter {
|
||||
}
|
||||
|
||||
/**
|
||||
* Print the usage message to stdout
|
||||
* Print the usage message to standard output
|
||||
*/
|
||||
public static void usage() {
|
||||
String usage = "Use this class with the following options:\n" +
|
||||
|
@@ -14,13 +14,14 @@ import javax.xml.transform.TransformerException;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.apache.xpath.XPathAPI;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.MetadataField;
|
||||
import org.dspace.content.MetadataSchema;
|
||||
import org.dspace.content.MetadataSchemaEnum;
|
||||
import org.dspace.content.NonUniqueMetadataException;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.MetadataFieldService;
|
||||
@@ -93,7 +94,7 @@ public class MetadataImporter {
|
||||
boolean forceUpdate = false;
|
||||
|
||||
// create an options object and populate it
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
Options options = new Options();
|
||||
options.addOption("f", "file", true, "source xml file for DC fields");
|
||||
options.addOption("u", "update", false, "update an existing schema");
|
||||
@@ -248,7 +249,7 @@ public class MetadataImporter {
|
||||
|
||||
// If the schema is not provided default to DC
|
||||
if (schema == null) {
|
||||
schema = MetadataSchema.DC_SCHEMA;
|
||||
schema = MetadataSchemaEnum.DC.getName();
|
||||
}
|
||||
|
||||
|
||||
|
@@ -17,7 +17,7 @@ import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
import javax.xml.transform.TransformerException;
|
||||
|
||||
import org.apache.log4j.Logger;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.xpath.XPathAPI;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.BitstreamFormat;
|
||||
@@ -47,7 +47,7 @@ public class RegistryLoader {
|
||||
/**
|
||||
* log4j category
|
||||
*/
|
||||
private static Logger log = Logger.getLogger(RegistryLoader.class);
|
||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(RegistryLoader.class);
|
||||
|
||||
protected static BitstreamFormatService bitstreamFormatService = ContentServiceFactory.getInstance()
|
||||
.getBitstreamFormatService();
|
||||
|
@@ -7,12 +7,24 @@
 */
package org.dspace.administer;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import static org.dspace.content.service.DSpaceObjectService.MD_COPYRIGHT_TEXT;
import static org.dspace.content.service.DSpaceObjectService.MD_INTRODUCTORY_TEXT;
import static org.dspace.content.service.DSpaceObjectService.MD_LICENSE;
import static org.dspace.content.service.DSpaceObjectService.MD_NAME;
import static org.dspace.content.service.DSpaceObjectService.MD_PROVENANCE_DESCRIPTION;
import static org.dspace.content.service.DSpaceObjectService.MD_SHORT_DESCRIPTION;
import static org.dspace.content.service.DSpaceObjectService.MD_SIDEBAR_TEXT;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
@@ -21,12 +33,19 @@ import javax.xml.transform.TransformerException;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.ParseException;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.MetadataFieldName;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
@@ -34,6 +53,7 @@ import org.dspace.core.Context;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.jdom.Element;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
@@ -45,44 +65,51 @@ import org.xml.sax.SAXException;
 * an XML file.
 *
 * The XML file structure needs to be:
 * {@code
 * <pre>{@code
 * <import_structure>
 *     <community>
 *         <name>....</name>
 *         <community>...</community>
 *         <collection>
 *             <name>....</name>
 *         </collection>
 *     </community>
 *     <community>
 *         <name>....</name>
 *         <community>...</community>
 *         <collection>
 *             <name>....</name>
 *         </collection>
 *     </community>
 * </import_structure>
 * }
 * it can be arbitrarily deep, and supports all the metadata elements
 * }</pre>
 * <p>
 * It can be arbitrarily deep, and supports all the metadata elements
 * that make up the community and collection metadata. See the system
 * documentation for more details
 * documentation for more details.
 *
 * @author Richard Jones
 */

public class StructBuilder {
    /**
     * the output xml document which will contain updated information about the
     * imported structure
    /** Name of the root element for the document to be imported. */
    static final String INPUT_ROOT = "import_structure";

    /*
     * Name of the root element for the document produced by importing.
     * Community and collection elements are annotated with their identifiers.
     */
    private static org.jdom.Document xmlOutput = new org.jdom.Document(new Element("imported_structure"));
    static final String RESULT_ROOT = "imported_structure";

    /**
     * a hashtable to hold metadata for the collection being worked on
     * A table to hold metadata for the collection being worked on.
     */
    private static Map<String, String> collectionMap = new HashMap<String, String>();
    private static final Map<String, MetadataFieldName> collectionMap = new HashMap<>();

    /**
     * a hashtable to hold metadata for the community being worked on
     * A table to hold metadata for the community being worked on.
     */
    private static Map<String, String> communityMap = new HashMap<String, String>();
    private static final Map<String, MetadataFieldName> communityMap = new HashMap<>();

    protected static CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
    protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
    protected static EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
    protected static CommunityService communityService
            = ContentServiceFactory.getInstance().getCommunityService();
    protected static CollectionService collectionService
            = ContentServiceFactory.getInstance().getCollectionService();
    protected static EPersonService ePersonService
            = EPersonServiceFactory.getInstance().getEPersonService();

    /**
     * Default constructor
@@ -91,135 +118,356 @@ public class StructBuilder {

    /**
     * Main method to be run from the command line to import a structure into
     * DSpace
     * DSpace or export existing structure to a file. The command is of the form:
     *
     * This is of the form:
     * <p>{@code StructBuilder -f [XML source] -e [administrator email] -o [output file]}
     *
     * {@code StructBuilder -f [xml source] -e [administrator email] -o [output file]}
     * <p>to import, or
     *
     * The output file will contain exactly the same as the source xml document, but
     * with the handle for each imported item added as an attribute.
     * <p>{@code StructBuilder -x -e [administrator email] -o [output file]}</p>
     *
     * @param argv the command line arguments given
     * @throws Exception if an error occurs
     * <p>to export. The output will contain exactly the same as the source XML
     * document, but with the Handle for each imported item added as an attribute.
     *
     *
     * @param argv command line arguments.
     * @throws ParserConfigurationException passed through.
     * @throws SQLException passed through.
     * @throws FileNotFoundException if input or output could not be opened.
     * @throws TransformerException if the input document is invalid.
     */
    public static void main(String[] argv)
        throws Exception {
        CommandLineParser parser = new PosixParser();

        throws ParserConfigurationException, SQLException,
               FileNotFoundException, IOException, TransformerException {
        // Define command line options.
        Options options = new Options();

        options.addOption("f", "file", true, "file");
        options.addOption("e", "eperson", true, "eperson");
        options.addOption("o", "output", true, "output");
        options.addOption("h", "help", false, "Print this help message.");
        options.addOption("?", "help");
        options.addOption("x", "export", false, "Export the current structure as XML.");

        CommandLine line = parser.parse(options, argv);
        options.addOption(Option.builder("e").longOpt("eperson")
                .desc("User who is manipulating the repository's structure.")
                .hasArg().argName("eperson").required().build());

        String file = null;
        String eperson = null;
        String output = null;
        options.addOption(Option.builder("f").longOpt("file")
                .desc("File of new structure information.")
                .hasArg().argName("input").build());

        if (line.hasOption('f')) {
            file = line.getOptionValue('f');
        options.addOption(Option.builder("o").longOpt("output")
                .desc("File to receive the structure map ('-' for standard out).")
                .hasArg().argName("output").required().build());

        // Parse the command line.
        CommandLineParser parser = new DefaultParser();
        CommandLine line = null;
        try {
            line = parser.parse(options, argv);
        } catch (ParseException ex) {
            System.err.println(ex.getMessage());
            usage(options);
            System.exit(1);
        }

        if (line.hasOption('e')) {
            eperson = line.getOptionValue('e');
        }

        if (line.hasOption('o')) {
            output = line.getOptionValue('o');
        }

        if (output == null || eperson == null || file == null) {
            usage();
        // If the user asked for help, give it and exit.
        if (line.hasOption('h') || line.hasOption('?')) {
            giveHelp(options);
            System.exit(0);
        }

        // Otherwise, analyze the command.
        // Must be import or export.
        if (!(line.hasOption('f') || line.hasOption('x'))) {
            giveHelp(options);
            System.exit(1);
        }

        // Open the output stream.
        String output = line.getOptionValue('o');
        OutputStream outputStream;
        if ("-".equals(output)) {
            outputStream = System.out;
        } else {
            outputStream = new FileOutputStream(output);
        }

        // create a context
        Context context = new Context();

        // set the context
        context.setCurrentUser(ePersonService.findByEmail(context, eperson));
        // set the context.
        String eperson = line.getOptionValue('e');
        try {
            context.setCurrentUser(ePersonService.findByEmail(context, eperson));
        } catch (SQLException ex) {
            System.err.format("That user could not be found: %s%n", ex.getMessage());
            System.exit(1);
        }

        // Export? Import?
        if (line.hasOption('x')) { // export
            exportStructure(context, outputStream);
        } else { // Must be import
            String input = line.getOptionValue('f');
            if (null == input) {
                usage(options);
                System.exit(1);
            }

            InputStream inputStream;
            if ("-".equals(input)) {
                inputStream = System.in;
            } else {
                inputStream = new FileInputStream(input);
            }

            importStructure(context, inputStream, outputStream);
            // save changes from import
            context.complete();
        }
        System.exit(0);
    }

    /**
     * Import new Community/Collection structure.
     *
     * @param context
     * @param input XML which describes the new communities and collections.
     * @param output input, annotated with the new objects' identifiers.
     * @throws IOException
     * @throws ParserConfigurationException
     * @throws SAXException
     * @throws TransformerException
     * @throws SQLException
     */
    static void importStructure(Context context, InputStream input, OutputStream output)
        throws IOException, ParserConfigurationException, SQLException, TransformerException {

        // load the XML
        Document document = loadXML(file);
        Document document = null;
        try {
            document = loadXML(input);
        } catch (IOException ex) {
            System.err.format("The input document could not be read: %s%n", ex.getMessage());
            System.exit(1);
        } catch (SAXException ex) {
            System.err.format("The input document could not be parsed: %s%n", ex.getMessage());
            System.exit(1);
        }

        // run the preliminary validation, to be sure that the XML document
        // is properly structured
        validate(document);
        // is properly structured.
        try {
            validate(document);
        } catch (TransformerException ex) {
            System.err.format("The input document is invalid: %s%n", ex.getMessage());
            System.exit(1);
        }

        // Check for 'identifier' attributes -- possibly output by this class.
        NodeList identifierNodes = XPathAPI.selectNodeList(document, "//*[@identifier]");
        if (identifierNodes.getLength() > 0) {
            System.err.println("The input document has 'identifier' attributes, which will be ignored.");
        }

        // load the mappings into the member variable hashmaps
        communityMap.put("name", "name");
        communityMap.put("description", "short_description");
        communityMap.put("intro", "introductory_text");
        communityMap.put("copyright", "copyright_text");
        communityMap.put("sidebar", "side_bar_text");
        communityMap.put("name", MD_NAME);
        communityMap.put("description", MD_SHORT_DESCRIPTION);
        communityMap.put("intro", MD_INTRODUCTORY_TEXT);
        communityMap.put("copyright", MD_COPYRIGHT_TEXT);
        communityMap.put("sidebar", MD_SIDEBAR_TEXT);

        collectionMap.put("name", "name");
        collectionMap.put("description", "short_description");
        collectionMap.put("intro", "introductory_text");
        collectionMap.put("copyright", "copyright_text");
        collectionMap.put("sidebar", "side_bar_text");
        collectionMap.put("license", "license");
        collectionMap.put("provenance", "provenance_description");
        collectionMap.put("name", MD_NAME);
        collectionMap.put("description", MD_SHORT_DESCRIPTION);
        collectionMap.put("intro", MD_INTRODUCTORY_TEXT);
        collectionMap.put("copyright", MD_COPYRIGHT_TEXT);
        collectionMap.put("sidebar", MD_SIDEBAR_TEXT);
        collectionMap.put("license", MD_LICENSE);
        collectionMap.put("provenance", MD_PROVENANCE_DESCRIPTION);

        // get the top level community list
        NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
        Element[] elements = new Element[]{};
        try {
            // get the top level community list
            NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");

            // run the import starting with the top level communities
            Element[] elements = handleCommunities(context, first, null);
            // run the import starting with the top level communities
            elements = handleCommunities(context, first, null);
        } catch (TransformerException ex) {
            System.err.format("Input content not understood: %s%n", ex.getMessage());
            System.exit(1);
        } catch (AuthorizeException ex) {
            System.err.format("Not authorized: %s%n", ex.getMessage());
            System.exit(1);
        }

        // generate the output
        Element root = xmlOutput.getRootElement();
        for (int i = 0; i < elements.length; i++) {
            root.addContent(elements[i]);
        final Element root = new Element(RESULT_ROOT);

        for (Element element : elements) {
            root.addContent(element);
        }

        // finally write the string into the output file
        // finally write the string into the output file.
        final org.jdom.Document xmlOutput = new org.jdom.Document(root);
        try {
            BufferedWriter out = new BufferedWriter(new FileWriter(output));
            out.write(new XMLOutputter().outputString(xmlOutput));
            out.close();
            new XMLOutputter().output(xmlOutput, output);
        } catch (IOException e) {
            System.out.println("Unable to write to output file " + output);
            System.exit(0);
            System.out.printf("Unable to write to output file %s: %s%n",
                    output, e.getMessage());
            System.exit(1);
        }
    }
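The rewritten `main` above builds its required options with the fluent `Option.builder` API rather than positional `addOption` calls, letting the parser itself enforce mandatory arguments. A compact sketch of that pattern, detached from StructBuilder (class name and demo wiring are illustrative):

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.CommandLineParser;
    import org.apache.commons.cli.DefaultParser;
    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.Options;
    import org.apache.commons.cli.ParseException;

    public class BuilderDemo {
        public static void main(String[] args) {
            Options options = new Options();
            // required() makes the parser reject a missing -e on its own, so the
            // hand-written null checks from the old code can be dropped.
            options.addOption(Option.builder("e").longOpt("eperson")
                    .desc("User who is manipulating the repository's structure.")
                    .hasArg().argName("eperson").required().build());
            options.addOption(Option.builder("o").longOpt("output")
                    .desc("File to receive the structure map ('-' for standard out).")
                    .hasArg().argName("output").required().build());

            CommandLineParser parser = new DefaultParser();
            try {
                CommandLine line = parser.parse(options, args);
                System.out.println("output = " + line.getOptionValue('o'));
            } catch (ParseException ex) {
                // A missing required option lands here with a descriptive message.
                System.err.println(ex.getMessage());
            }
        }
    }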
    /**
     * Add a single community, and its children, to the Document.
     *
     * @param community
     * @return a fragment representing this Community.
     */
    private static Element exportACommunity(Community community) {
        // Export this Community.
        Element element = new Element("community");
        element.setAttribute("identifier", community.getHandle());
        element.addContent(new Element("name").setText(community.getName()));
        element.addContent(new Element("description")
                .setText(communityService.getMetadataFirstValue(community,
                        MetadataSchemaEnum.DC.getName(), "description", "abstract", Item.ANY)));
        element.addContent(new Element("intro")
                .setText(communityService.getMetadataFirstValue(community,
                        MetadataSchemaEnum.DC.getName(), "description", null, Item.ANY)));
        element.addContent(new Element("copyright")
                .setText(communityService.getMetadataFirstValue(community,
                        MetadataSchemaEnum.DC.getName(), "rights", null, Item.ANY)));
        element.addContent(new Element("sidebar")
                .setText(communityService.getMetadataFirstValue(community,
                        MetadataSchemaEnum.DC.getName(), "description", "tableofcontents", Item.ANY)));

        // Export this Community's Community children.
        for (Community subCommunity : community.getSubcommunities()) {
            element.addContent(exportACommunity(subCommunity));
        }

        context.complete();
        // Export this Community's Collection children.
        for (Collection collection : community.getCollections()) {
            element.addContent(exportACollection(collection));
        }

        return element;
    }

    /**
     * Output the usage information
     * Add a single Collection to the Document.
     *
     * @param collection
     * @return a fragment representing this Collection.
     */
    private static void usage() {
        System.out.println("Usage: java StructBuilder -f <source XML file> -o <output file> -e <eperson email>");
        System.out.println(
            "Communities will be created from the top level, and a map of communities to handles will be returned in " +
            "the output file");
        return;
    private static Element exportACollection(Collection collection) {
        // Export this Collection.
        Element element = new Element("collection");
        element.setAttribute("identifier", collection.getHandle());
        element.addContent(new Element("name").setText(collection.getName()));
        element.addContent(new Element("description")
                .setText(collectionService.getMetadataFirstValue(collection,
                        MetadataSchemaEnum.DC.getName(), "description", "abstract", Item.ANY)));
        element.addContent(new Element("intro")
                .setText(collectionService.getMetadataFirstValue(collection,
                        MetadataSchemaEnum.DC.getName(), "description", null, Item.ANY)));
        element.addContent(new Element("copyright")
                .setText(collectionService.getMetadataFirstValue(collection,
                        MetadataSchemaEnum.DC.getName(), "rights", null, Item.ANY)));
        element.addContent(new Element("sidebar")
                .setText(collectionService.getMetadataFirstValue(collection,
                        MetadataSchemaEnum.DC.getName(), "description", "tableofcontents", Item.ANY)));
        element.addContent(new Element("license")
                .setText(collectionService.getMetadataFirstValue(collection,
                        MetadataSchemaEnum.DC.getName(), "rights", "license", Item.ANY)));
        // Provenance is special: multivalued
        for (MetadataValue value : collectionService.getMetadata(collection,
                MetadataSchemaEnum.DC.getName(), "provenance", null, Item.ANY)) {
            element.addContent(new Element("provenance")
                    .setText(value.getValue()));
        }

        return element;
    }

    /**
     * Validate the XML document. This method does not return, but if validation
     * fails it generates an error and ceases execution
     * Write out the existing Community/Collection structure.
     */
    static void exportStructure(Context context, OutputStream output) {
        // Build a document from the Community/Collection hierarchy.
        Element rootElement = new Element(INPUT_ROOT); // To be read by importStructure, perhaps

        List<Community> communities = null;
        try {
            communities = communityService.findAllTop(context);
        } catch (SQLException ex) {
            System.out.printf("Unable to get the list of top-level communities: %s%n",
                    ex.getMessage());
            System.exit(1);
        }

        for (Community community : communities) {
            rootElement.addContent(exportACommunity(community));
        }

        // Now write the structure out.
        org.jdom.Document xmlOutput = new org.jdom.Document(rootElement);
        try {
            XMLOutputter outputter = new XMLOutputter(Format.getPrettyFormat());
            outputter.output(xmlOutput, output);
        } catch (IOException e) {
            System.out.printf("Unable to write to output file %s: %s%n",
                    output, e.getMessage());
            System.exit(1);
        }
    }
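`exportStructure` writes the JDOM document straight to the `OutputStream` with a pretty-printing `XMLOutputter`, instead of round-tripping through a `String` and a `FileWriter` as the old import path did. A minimal self-contained sketch against the JDOM 1.x API used here (the element names echo the structure file format):

    import java.io.IOException;

    import org.jdom.Document;
    import org.jdom.Element;
    import org.jdom.output.Format;
    import org.jdom.output.XMLOutputter;

    public class JdomDemo {
        public static void main(String[] args) throws IOException {
            Element root = new Element("import_structure");
            Element community = new Element("community");
            community.addContent(new Element("name").setText("Example community"));
            root.addContent(community);

            // Writing the Document directly to a stream avoids building the whole
            // serialized form in memory first.
            Document document = new Document(root);
            new XMLOutputter(Format.getPrettyFormat()).output(document, System.out);
        }
    }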
    /**
     * Output the usage information.
     */
    private static void usage(Options options) {
        HelpFormatter helper = new HelpFormatter();
        try (PrintWriter writer = new PrintWriter(System.out);) {
            helper.printUsage(writer, 80/* FIXME Magic */,
                    "structure-builder", options);
        }
    }

    /**
     * Help the user more.
     */
    private static void giveHelp(Options options) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("struct-builder",
                "Import or export Community/Collection structure.",
                options,
                "When importing (-f), communities will be created from the "
                        + "top level, and a map of communities to handles will "
                        + "be returned in the output file. When exporting (-x), "
                        + "the current structure will be written to the map file.",
                true);
    }

    /**
     * Validate the XML document. This method returns if the document is valid.
     * If validation fails it generates an error and ceases execution.
     *
     * @param document the XML document object
     * @throws TransformerException if transformer error
     */
    private static void validate(org.w3c.dom.Document document)
        throws TransformerException {
        StringBuffer err = new StringBuffer();
        StringBuilder err = new StringBuilder();
        boolean trip = false;

        err.append("The following errors were encountered parsing the source XML\n");
        err.append("No changes have been made to the DSpace instance\n\n");
        err.append("The following errors were encountered parsing the source XML.\n");
        err.append("No changes have been made to the DSpace instance.\n\n");

        NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
        if (first.getLength() == 0) {
            err.append("-There are no top level communities in the source document");
            err.append("-There are no top level communities in the source document.");
            System.out.println(err.toString());
            System.exit(0);
            System.exit(1);
        }

        String errs = validateCommunities(first, 1);
@@ -230,13 +478,13 @@ public class StructBuilder {

        if (trip) {
            System.out.println(err.toString());
            System.exit(0);
            System.exit(1);
        }
    }

    /**
     * Validate the communities section of the XML document. This returns a string
     * containing any errors encountered, or null if there were no errors
     * containing any errors encountered, or null if there were no errors.
     *
     * @param communities the NodeList of communities to validate
     * @param level the level in the XML document that we are at, for the purposes
@@ -246,7 +494,7 @@ public class StructBuilder {
     */
    private static String validateCommunities(NodeList communities, int level)
        throws TransformerException {
        StringBuffer err = new StringBuffer();
        StringBuilder err = new StringBuilder();
        boolean trip = false;
        String errs = null;

@@ -255,8 +503,9 @@ public class StructBuilder {
            NodeList name = XPathAPI.selectNodeList(n, "name");
            if (name.getLength() != 1) {
                String pos = Integer.toString(i + 1);
                err.append("-The level " + level + " community in position " + pos);
                err.append(" does not contain exactly one name field\n");
                err.append("-The level ").append(level)
                        .append(" community in position ").append(pos)
                        .append(" does not contain exactly one name field.\n");
                trip = true;
            }

@@ -286,7 +535,7 @@ public class StructBuilder {

    /**
     * validate the collection section of the XML document. This generates a
     * string containing any errors encountered, or returns null if no errors
     * string containing any errors encountered, or returns null if no errors.
     *
     * @param collections a NodeList of collections to validate
     * @param level the level in the XML document for the purposes of error reporting
@@ -294,7 +543,7 @@ public class StructBuilder {
     */
    private static String validateCollections(NodeList collections, int level)
        throws TransformerException {
        StringBuffer err = new StringBuffer();
        StringBuilder err = new StringBuilder();
        boolean trip = false;
        String errs = null;

@@ -303,8 +552,9 @@ public class StructBuilder {
            NodeList name = XPathAPI.selectNodeList(n, "name");
            if (name.getLength() != 1) {
                String pos = Integer.toString(i + 1);
                err.append("-The level " + level + " collection in position " + pos);
                err.append(" does not contain exactly one name field\n");
                err.append("-The level ").append(level)
                        .append(" collection in position ").append(pos)
                        .append(" does not contain exactly one name field.\n");
                trip = true;
            }
        }
@@ -317,17 +567,17 @@ public class StructBuilder {
    }

    /**
     * Load in the XML from file.
     * Load the XML document from input.
     *
     * @param filename the filename to load from
     * @return the DOM representation of the XML file
     * @param input the input stream to load from.
     * @return the DOM representation of the XML input.
     */
    private static org.w3c.dom.Document loadXML(String filename)
    private static org.w3c.dom.Document loadXML(InputStream input)
        throws IOException, ParserConfigurationException, SAXException {
        DocumentBuilder builder = DocumentBuilderFactory.newInstance()
                                                        .newDocumentBuilder();

        org.w3c.dom.Document document = builder.parse(new File(filename));
        org.w3c.dom.Document document = builder.parse(input);

        return document;
    }
@@ -338,7 +588,7 @@ public class StructBuilder {
     * @param node the node from which we want to extract the string value
     * @return the string value of the node
     */
    public static String getStringValue(Node node) {
    private static String getStringValue(Node node) {
        String value = node.getNodeValue();

        if (node.hasChildNodes()) {
@@ -363,7 +613,7 @@ public class StructBuilder {
     * created communities (e.g. the handles they have been assigned)
     */
    private static Element[] handleCommunities(Context context, NodeList communities, Community parent)
        throws TransformerException, SQLException, Exception {
        throws TransformerException, SQLException, AuthorizeException {
        Element[] elements = new Element[communities.getLength()];

        for (int i = 0; i < communities.getLength(); i++) {
@@ -378,24 +628,24 @@ public class StructBuilder {
            }

            // default the short description to be an empty string
            communityService.setMetadata(context, community, "short_description", " ");
            communityService.setMetadataSingleValue(context, community,
                    MD_SHORT_DESCRIPTION, null, " ");

            // now update the metadata
            Node tn = communities.item(i);
            for (Map.Entry<String, String> entry : communityMap.entrySet()) {
            for (Map.Entry<String, MetadataFieldName> entry : communityMap.entrySet()) {
                NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
                if (nl.getLength() == 1) {
                    communityService.setMetadata(context, community, entry.getValue(), getStringValue(nl.item(0)));
                    communityService.setMetadataSingleValue(context, community,
                            entry.getValue(), null, getStringValue(nl.item(0)));
                }
            }

            // FIXME: at the moment, if the community already exists by name
            // then this will throw a PSQLException on a duplicate key
            // violation
            // Ideally we'd skip this row and continue to create sub
            // communities
            // and so forth where they don't exist, but it's proving
            // difficult
            // then this will throw an SQLException on a duplicate key
            // violation.
            // Ideally we'd skip this row and continue to create sub communities
            // and so forth where they don't exist, but it's proving difficult
            // to isolate the community that already exists without hitting
            // the database directly.
            communityService.update(context, community);
@@ -411,30 +661,41 @@ public class StructBuilder {
            element.setAttribute("identifier", community.getHandle());

            Element nameElement = new Element("name");
            nameElement.setText(communityService.getMetadata(community, "name"));
            nameElement.setText(communityService.getMetadataFirstValue(
                    community, CommunityService.MD_NAME, Item.ANY));
            element.addContent(nameElement);

            if (communityService.getMetadata(community, "short_description") != null) {
            String fieldValue;

            fieldValue = communityService.getMetadataFirstValue(community,
                    CommunityService.MD_SHORT_DESCRIPTION, Item.ANY);
            if (fieldValue != null) {
                Element descriptionElement = new Element("description");
                descriptionElement.setText(communityService.getMetadata(community, "short_description"));
                descriptionElement.setText(fieldValue);
                element.addContent(descriptionElement);
            }

            if (communityService.getMetadata(community, "introductory_text") != null) {
            fieldValue = communityService.getMetadataFirstValue(community,
                    CommunityService.MD_INTRODUCTORY_TEXT, Item.ANY);
            if (fieldValue != null) {
                Element introElement = new Element("intro");
                introElement.setText(communityService.getMetadata(community, "introductory_text"));
                introElement.setText(fieldValue);
                element.addContent(introElement);
            }

            if (communityService.getMetadata(community, "copyright_text") != null) {
            fieldValue = communityService.getMetadataFirstValue(community,
                    CommunityService.MD_COPYRIGHT_TEXT, Item.ANY);
            if (fieldValue != null) {
                Element copyrightElement = new Element("copyright");
                copyrightElement.setText(communityService.getMetadata(community, "copyright_text"));
                copyrightElement.setText(fieldValue);
                element.addContent(copyrightElement);
            }

            if (communityService.getMetadata(community, "side_bar_text") != null) {
            fieldValue = communityService.getMetadataFirstValue(community,
                    CommunityService.MD_SIDEBAR_TEXT, Item.ANY);
            if (fieldValue != null) {
                Element sidebarElement = new Element("sidebar");
                sidebarElement.setText(communityService.getMetadata(community, "side_bar_text"));
                sidebarElement.setText(fieldValue);
                element.addContent(sidebarElement);
            }
@@ -470,7 +731,7 @@ public class StructBuilder {
     * created collections (e.g. the handle)
     */
    private static Element[] handleCollections(Context context, NodeList collections, Community parent)
        throws TransformerException, SQLException, AuthorizeException, IOException, Exception {
        throws TransformerException, SQLException, AuthorizeException {
        Element[] elements = new Element[collections.getLength()];

        for (int i = 0; i < collections.getLength(); i++) {
@@ -478,14 +739,16 @@ public class StructBuilder {
            Collection collection = collectionService.create(context, parent);

            // default the short description to the empty string
            collectionService.setMetadata(context, collection, "short_description", " ");
            collectionService.setMetadataSingleValue(context, collection,
                    MD_SHORT_DESCRIPTION, Item.ANY, " ");

            // import the rest of the metadata
            Node tn = collections.item(i);
            for (Map.Entry<String, String> entry : collectionMap.entrySet()) {
            for (Map.Entry<String, MetadataFieldName> entry : collectionMap.entrySet()) {
                NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
                if (nl.getLength() == 1) {
                    collectionService.setMetadata(context, collection, entry.getValue(), getStringValue(nl.item(0)));
                    collectionService.setMetadataSingleValue(context, collection,
                            entry.getValue(), null, getStringValue(nl.item(0)));
                }
            }

@@ -494,42 +757,57 @@ public class StructBuilder {
            element.setAttribute("identifier", collection.getHandle());

            Element nameElement = new Element("name");
            nameElement.setText(collectionService.getMetadata(collection, "name"));
            nameElement.setText(collectionService.getMetadataFirstValue(collection,
                    CollectionService.MD_NAME, Item.ANY));
            element.addContent(nameElement);

            if (collectionService.getMetadata(collection, "short_description") != null) {
            String fieldValue;

            fieldValue = collectionService.getMetadataFirstValue(collection,
                    CollectionService.MD_SHORT_DESCRIPTION, Item.ANY);
            if (fieldValue != null) {
                Element descriptionElement = new Element("description");
                descriptionElement.setText(collectionService.getMetadata(collection, "short_description"));
                descriptionElement.setText(fieldValue);
                element.addContent(descriptionElement);
            }

            if (collectionService.getMetadata(collection, "introductory_text") != null) {
            fieldValue = collectionService.getMetadataFirstValue(collection,
                    CollectionService.MD_INTRODUCTORY_TEXT, Item.ANY);
            if (fieldValue != null) {
                Element introElement = new Element("intro");
                introElement.setText(collectionService.getMetadata(collection, "introductory_text"));
                introElement.setText(fieldValue);
                element.addContent(introElement);
            }

            if (collectionService.getMetadata(collection, "copyright_text") != null) {
            fieldValue = collectionService.getMetadataFirstValue(collection,
                    CollectionService.MD_COPYRIGHT_TEXT, Item.ANY);
            if (fieldValue != null) {
                Element copyrightElement = new Element("copyright");
                copyrightElement.setText(collectionService.getMetadata(collection, "copyright_text"));
                copyrightElement.setText(fieldValue);
                element.addContent(copyrightElement);
            }

            if (collectionService.getMetadata(collection, "side_bar_text") != null) {
            fieldValue = collectionService.getMetadataFirstValue(collection,
                    CollectionService.MD_SIDEBAR_TEXT, Item.ANY);
            if (fieldValue != null) {
                Element sidebarElement = new Element("sidebar");
                sidebarElement.setText(collectionService.getMetadata(collection, "side_bar_text"));
                sidebarElement.setText(fieldValue);
                element.addContent(sidebarElement);
            }

            if (collectionService.getMetadata(collection, "license") != null) {
            fieldValue = collectionService.getMetadataFirstValue(collection,
                    CollectionService.MD_LICENSE, Item.ANY);
            if (fieldValue != null) {
                Element sidebarElement = new Element("license");
                sidebarElement.setText(collectionService.getMetadata(collection, "license"));
                sidebarElement.setText(fieldValue);
                element.addContent(sidebarElement);
            }

            if (collectionService.getMetadata(collection, "provenance_description") != null) {
            fieldValue = collectionService.getMetadataFirstValue(collection,
                    CollectionService.MD_PROVENANCE_DESCRIPTION, Item.ANY);
            if (fieldValue != null) {
                Element sidebarElement = new Element("provenance");
                sidebarElement.setText(collectionService.getMetadata(collection, "provenance_description"));
                sidebarElement.setText(fieldValue);
                element.addContent(sidebarElement);
            }

@@ -538,5 +816,4 @@ public class StructBuilder {

        return elements;
    }

}
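Throughout the StructBuilder changes, stringly-typed field names like "short_description" give way to typed `MetadataFieldName` constants (`MD_SHORT_DESCRIPTION` and friends) consumed by `setMetadataSingleValue`/`getMetadataFirstValue`. A hedged sketch of how calling code changes shape; the service calls mirror the diff, but the surrounding method and its inputs are illustrative and assume a running DSpace kernel:

    // Sketch only: 'context' and 'community' would come from the caller,
    // as they do in StructBuilder above.
    import static org.dspace.content.service.DSpaceObjectService.MD_SHORT_DESCRIPTION;

    import java.sql.SQLException;

    import org.dspace.content.Community;
    import org.dspace.content.Item;
    import org.dspace.content.factory.ContentServiceFactory;
    import org.dspace.content.service.CommunityService;
    import org.dspace.core.Context;

    public class TypedMetadataDemo {
        private static final CommunityService communityService =
                ContentServiceFactory.getInstance().getCommunityService();

        static void describe(Context context, Community community) throws SQLException {
            // Old style: communityService.setMetadata(context, community, "short_description", "...");
            // New style: the field is a typed constant, so a typo fails at compile time.
            communityService.setMetadataSingleValue(context, community,
                    MD_SHORT_DESCRIPTION, null, "A demonstration community");

            String value = communityService.getMetadataFirstValue(community,
                    CommunityService.MD_SHORT_DESCRIPTION, Item.ANY);
            System.out.println("short description = " + value);
        }
    }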
@@ -19,6 +19,7 @@ import org.dspace.content.Item;
 * @author Stuart Lewis
 */
public class BulkEditChange {

    /**
     * The item these changes relate to
     */
@@ -8,14 +8,10 @@
package org.dspace.app.bulkedit;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -27,6 +23,8 @@ import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityValueService;
@@ -34,6 +32,7 @@ import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
import org.dspace.content.authority.Choices;
import org.dspace.content.factory.ContentServiceFactory;
@@ -139,18 +138,18 @@ public class DSpaceCSV implements Serializable {
    /**
     * Create a new instance, reading the lines in from file
     *
     * @param f The file to read from
     * @param inputStream the inputstream to read from
     * @param c The DSpace Context
     * @throws Exception thrown if there is an error reading or processing the file
     */
    public DSpaceCSV(File f, Context c) throws Exception {
    public DSpaceCSV(InputStream inputStream, Context c) throws Exception {
        // Initialise the class
        init();

        // Open the CSV file
        BufferedReader input = null;
        try {
            input = new BufferedReader(new InputStreamReader(new FileInputStream(f), "UTF-8"));
            input = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));

            // Read the heading line
            String head = input.readLine();
@@ -168,16 +167,22 @@ public class DSpaceCSV implements Serializable {
                if ("collection".equals(element)) {
                    // Store the heading
                    headings.add(element);
                } else if ("rowName".equals(element)) {
                    // Store the heading
                    headings.add(element);
                } else if ("action".equals(element)) { // Store the action
                    // Store the heading
                    headings.add(element);
                } else if (!"id".equals(element)) {
                    String authorityPrefix = "";
                    AuthorityValue authorityValueType = authorityValueService.getAuthorityValueType(element);
                    if (authorityValueType != null) {
                        String authorityType = authorityValueType.getAuthorityType();
                        authorityPrefix = element.substring(0, authorityType.length() + 1);
                        element = element.substring(authorityPrefix.length());
                    if (StringUtils.startsWith(element, "[authority]")) {
                        element = StringUtils.substringAfter(element, "[authority]");
                        AuthorityValue authorityValueType = authorityValueService.getAuthorityValueType(element);
                        if (authorityValueType != null) {
                            String authorityType = authorityValueType.getAuthorityType();
                            authorityPrefix = element.substring(0, authorityType.length() + 1);
                            element = element.substring(authorityPrefix.length());
                        }
                    }

                    // Verify that the heading is valid in the metadata registry
@@ -198,20 +203,24 @@ public class DSpaceCSV implements Serializable {
                    }

                    // Check that the scheme exists
                    MetadataSchema foundSchema = metadataSchemaService.find(c, metadataSchema);
                    if (foundSchema == null) {
                        throw new MetadataImportInvalidHeadingException(clean[0],
                                                                        MetadataImportInvalidHeadingException.SCHEMA,
                                                                        columnCounter);
                    }
                    if (!StringUtils.equals(metadataSchema, MetadataSchemaEnum.RELATION.getName())) {
                        MetadataSchema foundSchema = metadataSchemaService.find(c, metadataSchema);
                        if (foundSchema == null) {
                            throw new MetadataImportInvalidHeadingException(clean[0],
                                                                            MetadataImportInvalidHeadingException
                                                                                .SCHEMA,
                                                                            columnCounter);
                        }

                    // Check that the metadata element exists in the schema
                    MetadataField foundField = metadataFieldService
                        .findByElement(c, foundSchema, metadataElement, metadataQualifier);
                    if (foundField == null) {
                        throw new MetadataImportInvalidHeadingException(clean[0],
                                                                        MetadataImportInvalidHeadingException.ELEMENT,
                                                                        columnCounter);
                        // Check that the metadata element exists in the schema
                        MetadataField foundField = metadataFieldService
                            .findByElement(c, foundSchema, metadataElement, metadataQualifier);
                        if (foundField == null) {
                            throw new MetadataImportInvalidHeadingException(clean[0],
                                                                            MetadataImportInvalidHeadingException
                                                                                .ELEMENT,
                                                                            columnCounter);
                        }
                    }

                    // Store the heading
@@ -297,7 +306,7 @@ public class DSpaceCSV implements Serializable {
        // Specify default values
        String[] defaultValues =
            new String[] {
                "dc.date.accessioned, dc.date.available, dc.date.updated, dc.description.provenance"
                "dc.date.accessioned", "dc.date.available", "dc.date.updated", "dc.description.provenance"
            };
        String[] toIgnoreArray =
            DSpaceServicesFactory.getInstance()
@@ -614,21 +623,15 @@ public class DSpaceCSV implements Serializable {
    }

    /**
     * Save the CSV file to the given filename
     *
     * @param filename The filename to save the CSV file to
     * @throws IOException Thrown if an error occurs when writing the file
     * Creates and returns an InputStream from the CSV Lines in this DSpaceCSV
     * @return The InputStream created from the CSVLines in this DSpaceCSV
     */
    public final void save(String filename) throws IOException {
        // Save the file
        BufferedWriter out = new BufferedWriter(
            new OutputStreamWriter(
                new FileOutputStream(filename), "UTF-8"));
    public InputStream getInputStream() {
        StringBuilder stringBuilder = new StringBuilder();
        for (String csvLine : getCSVLinesAsStringArray()) {
            out.write(csvLine + "\n");
            stringBuilder.append(csvLine + "\n");
        }
        out.flush();
        out.close();
        return IOUtils.toInputStream(stringBuilder.toString(), StandardCharsets.UTF_8);
    }

    /**
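`DSpaceCSV.save(filename)` is replaced by `getInputStream()`, which serializes the CSV lines in memory and hands back a stream; callers such as the new `MetadataExport` pass that stream to the script framework instead of writing files themselves. A standalone sketch of the same commons-io idiom, with made-up sample data:

    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;

    import org.apache.commons.io.IOUtils;

    public class InMemoryStreamDemo {
        public static void main(String[] args) throws IOException {
            String[] csvLines = {"id,collection,dc.title", "123,456,\"Example item\""};

            // Join the lines exactly as getInputStream() does, then expose the
            // result as an InputStream without touching the filesystem.
            StringBuilder builder = new StringBuilder();
            for (String line : csvLines) {
                builder.append(line).append("\n");
            }
            InputStream stream = IOUtils.toInputStream(builder.toString(), StandardCharsets.UTF_8);
            System.out.println(IOUtils.toString(stream, StandardCharsets.UTF_8));
        }
    }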
@@ -0,0 +1,115 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.bulkedit;

import java.sql.SQLException;

import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.ArrayUtils;
import org.dspace.content.MetadataField;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataValueService;
import org.dspace.core.Context;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.utils.DSpace;

/**
 * {@link DSpaceRunnable} implementation to delete all the values of the given
 * metadata field.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class MetadataDeletion extends DSpaceRunnable<MetadataDeletionScriptConfiguration<MetadataDeletion>> {

    private MetadataValueService metadataValueService;

    private MetadataFieldService metadataFieldService;

    private ConfigurationService configurationService;

    private String metadataField;

    private boolean list;

    @Override
    public void internalRun() throws Exception {

        if (list) {
            listErasableMetadata();
            return;
        }

        Context context = new Context();

        try {
            context.turnOffAuthorisationSystem();
            performMetadataValuesDeletion(context);
        } finally {
            context.restoreAuthSystemState();
            context.complete();
        }

    }

    private void listErasableMetadata() {
        String[] erasableMetadata = getErasableMetadata();
        if (ArrayUtils.isEmpty(erasableMetadata)) {
            handler.logInfo("No fields have been configured to be cleared via bulk deletion");
        } else {
            handler.logInfo("The fields that can be bulk deleted are: " + String.join(", ", erasableMetadata));
        }
    }

    private void performMetadataValuesDeletion(Context context) throws SQLException {

        MetadataField field = metadataFieldService.findByString(context, metadataField, '.');
        if (field == null) {
            throw new IllegalArgumentException("No metadata field found with name " + metadataField);
        }

        if (!ArrayUtils.contains(getErasableMetadata(), metadataField)) {
            throw new IllegalArgumentException("The given metadata field cannot be bulk deleted");
        }

        handler.logInfo(String.format("Deleting the field '%s' from all objects", metadataField));

        metadataValueService.deleteByMetadataField(context, field);
    }

    private String[] getErasableMetadata() {
        return configurationService.getArrayProperty("bulkedit.allow-bulk-deletion");
    }

    @Override
    @SuppressWarnings("unchecked")
    public MetadataDeletionScriptConfiguration<MetadataDeletion> getScriptConfiguration() {
        return new DSpace().getServiceManager()
                           .getServiceByName("metadata-deletion", MetadataDeletionScriptConfiguration.class);
    }

    @Override
    public void setup() throws ParseException {

        metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService();
        metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();
        configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();

        metadataField = commandLine.getOptionValue('m');
        list = commandLine.hasOption('l');

        if (!list && metadataField == null) {
            throw new ParseException("One of the following parameters is required: -m or -l");
        }

    }

}
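Note how `internalRun` above wraps the destructive deletion in `turnOffAuthorisationSystem()`/`restoreAuthSystemState()` with a `finally` block, so authorization checks come back even if the deletion throws. A sketch of that discipline in isolation; the DSpace Context calls are the ones used above, the stub method is illustrative, and creating a `Context` assumes a running DSpace kernel:

    import org.dspace.core.Context;

    public class AuthTogglePattern {
        void runPrivileged() throws Exception {
            // Assumes a live DSpace kernel, as in a DSpaceRunnable script.
            Context context = new Context();
            try {
                context.turnOffAuthorisationSystem();
                doDestructiveWork(context);      // stand-in for performMetadataValuesDeletion()
            } finally {
                // Runs on both success and failure, so the context is never left
                // with authorization checks disabled.
                context.restoreAuthSystemState();
                context.complete();
            }
        }

        private void doDestructiveWork(Context context) {
            // placeholder for the real work
        }
    }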
@@ -0,0 +1,18 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.bulkedit;

/**
 * The {@link MetadataDeletion} for CLI.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class MetadataDeletionCli extends MetadataDeletion {

}
@@ -0,0 +1,18 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.bulkedit;

/**
 * Script configuration for {@link MetadataDeletionCli}.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class MetadataDeletionCliScriptConfiguration extends MetadataDeletionScriptConfiguration<MetadataDeletionCli> {

}
@@ -0,0 +1,68 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.bulkedit;

import java.sql.SQLException;

import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * The {@link ScriptConfiguration} for the {@link MetadataDeletion} script.
 */
public class MetadataDeletionScriptConfiguration<T extends MetadataDeletion> extends ScriptConfiguration<T> {

    @Autowired
    private AuthorizeService authorizeService;

    private Class<T> dspaceRunnableClass;

    @Override
    public boolean isAllowedToExecute(Context context) {
        try {
            return authorizeService.isAdmin(context);
        } catch (SQLException e) {
            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
        }
    }

    @Override
    public Options getOptions() {
        if (options == null) {

            Options options = new Options();

            options.addOption("m", "metadata", true, "metadata field name");
            options.getOption("m").setType(String.class);

            options.addOption("l", "list", false, "lists the metadata fields that can be deleted");
            options.getOption("l").setType(boolean.class);

            super.options = options;
        }
        return options;
    }

    @Override
    public Class<T> getDspaceRunnableClass() {
        return dspaceRunnableClass;
    }

    /**
     * Generic setter for the dspaceRunnableClass
     * @param dspaceRunnableClass The dspaceRunnableClass to be set on this MetadataDeletionScriptConfiguration
     */
    @Override
    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
        this.dspaceRunnableClass = dspaceRunnableClass;
    }

}
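`getOptions()` above builds its `Options` object once and caches it in the inherited `options` field, so repeated calls (help rendering, parsing) share one instance. A plain commons-cli sketch of that lazy-initialization shape, outside the DSpace class hierarchy; the holder class is illustrative:

    import org.apache.commons.cli.Options;

    public class LazyOptionsHolder {
        private Options options;

        public Options getOptions() {
            if (options == null) {
                Options opts = new Options();
                opts.addOption("m", "metadata", true, "metadata field name");
                opts.addOption("l", "list", false, "lists the metadata fields that can be deleted");
                // Cache so later callers get the same instance.
                options = opts;
            }
            return options;
        }

        public static void main(String[] args) {
            LazyOptionsHolder holder = new LazyOptionsHolder();
            System.out.println(holder.getOptions().getOptions().size()); // prints 2
        }
    }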
@@ -8,271 +8,115 @@
package org.dspace.app.bulkedit;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;

import com.google.common.collect.Iterators;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.util.factory.UtilServiceFactory;
import org.dspace.app.util.service.DSpaceObjectUtils;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.content.service.MetadataDSpaceCsvExportService;
import org.dspace.core.Context;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.utils.DSpace;

/**
 * Metadata exporter to allow the batch export of metadata into a file
 *
 * @author Stuart Lewis
 */
public class MetadataExport {
    /**
     * The items to export
     */
    protected Iterator<Item> toExport;
public class MetadataExport extends DSpaceRunnable<MetadataExportScriptConfiguration> {

    protected ItemService itemService;
    private boolean help = false;
    private String filename = null;
    private String identifier = null;
    private boolean exportAllMetadata = false;
    private boolean exportAllItems = false;

    protected Context context;
    private static final String EXPORT_CSV = "exportCSV";

    /**
     * Whether to export all metadata, or just normally edited metadata
     */
    protected boolean exportAll;
    private MetadataDSpaceCsvExportService metadataDSpaceCsvExportService = new DSpace().getServiceManager()
            .getServicesByType(MetadataDSpaceCsvExportService.class).get(0);

    protected MetadataExport() {
        itemService = ContentServiceFactory.getInstance().getItemService();
    }
    private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();

    /**
     * Set up a new metadata export
     *
     * @param c The Context
     * @param toExport The ItemIterator of items to export
     * @param exportAll whether to export all metadata or not (include handle, provenance etc)
     */
    public MetadataExport(Context c, Iterator<Item> toExport, boolean exportAll) {
        itemService = ContentServiceFactory.getInstance().getItemService();
    private DSpaceObjectUtils dSpaceObjectUtils = UtilServiceFactory.getInstance().getDSpaceObjectUtils();

        // Store the export settings
        this.toExport = toExport;
        this.exportAll = exportAll;
        this.context = c;
    }

    /**
     * Method to export a community (and sub-communities and collections)
     *
     * @param c The Context
     * @param toExport The Community to export
     * @param exportAll whether to export all metadata or not (include handle, provenance etc)
     */
    public MetadataExport(Context c, Community toExport, boolean exportAll) {
        itemService = ContentServiceFactory.getInstance().getItemService();
    @Override
    public void internalRun() throws Exception {

        if (help) {
            logHelpInfo();
            printHelp();
            return;
        }
        Context context = new Context();
        context.turnOffAuthorisationSystem();
        try {
            // Try to export the community
            this.toExport = buildFromCommunity(c, toExport, 0);
            this.exportAll = exportAll;
            this.context = c;
        } catch (SQLException sqle) {
            // Something went wrong...
            System.err.println("Error running exporter:");
            sqle.printStackTrace(System.err);
            System.exit(1);
            context.setCurrentUser(ePersonService.find(context, this.getEpersonIdentifier()));
        } catch (SQLException e) {
            handler.handleException(e);
        }
        DSpaceCSV dSpaceCSV = metadataDSpaceCsvExportService
            .handleExport(context, exportAllItems, exportAllMetadata, identifier,
                          handler);
        handler.writeFilestream(context, filename, dSpaceCSV.getInputStream(), EXPORT_CSV);
        context.restoreAuthSystemState();
        context.complete();
    }

    /**
     * Build an array list of item ids that are in a community (include sub-communities and collections)
     *
     * @param context DSpace context
     * @param community The community to build from
     * @param indent How many spaces to use when writing out the names of items added
     * @return The list of item ids
     * @throws SQLException if database error
     */
    protected Iterator<Item> buildFromCommunity(Context context, Community community, int indent)
        throws SQLException {
        // Add all the collections
        List<Collection> collections = community.getCollections();
        Iterator<Item> result = null;
        for (Collection collection : collections) {
            for (int i = 0; i < indent; i++) {
                System.out.print(" ");
            }

            Iterator<Item> items = itemService.findByCollection(context, collection);
            result = addItemsToResult(result, items);

        }
        // Add all the sub-communities
        List<Community> communities = community.getSubcommunities();
        for (Community subCommunity : communities) {
            for (int i = 0; i < indent; i++) {
                System.out.print(" ");
            }
            Iterator<Item> items = buildFromCommunity(context, subCommunity, indent + 1);
            result = addItemsToResult(result, items);
        }

        return result;
    protected void logHelpInfo() {
        handler.logInfo("\nfull export: metadata-export");
        handler.logInfo("partial export: metadata-export -i handle/UUID");
    }

    private Iterator<Item> addItemsToResult(Iterator<Item> result, Iterator<Item> items) {
        if (result == null) {
            result = items;
        } else {
            result = Iterators.concat(result, items);
        }

        return result;
    @Override
    public MetadataExportScriptConfiguration getScriptConfiguration() {
        return new DSpace().getServiceManager().getServiceByName("metadata-export",
                MetadataExportScriptConfiguration.class);
    }

    /**
     * Run the export
     *
     * @return the exported CSV lines
     */
    public DSpaceCSV export() {
    @Override
    public void setup() throws ParseException {

        if (commandLine.hasOption('h')) {
            help = true;
            return;
        }

        if (!commandLine.hasOption('i')) {
            exportAllItems = true;
        }
        identifier = commandLine.getOptionValue('i');
        filename = getFileNameForExportFile();

        exportAllMetadata = commandLine.hasOption('a');

    }

    protected String getFileNameForExportFile() throws ParseException {
        Context context = new Context();
        try {
            Context.Mode originalMode = context.getCurrentMode();
            context.setMode(Context.Mode.READ_ONLY);

            // Process each item
            DSpaceCSV csv = new DSpaceCSV(exportAll);
            while (toExport.hasNext()) {
                Item item = toExport.next();
                csv.addItem(item);
                context.uncacheEntity(item);
            }

            context.setMode(originalMode);
            // Return the results
            return csv;
        } catch (Exception e) {
            // Something went wrong...
            System.err.println("Error exporting to CSV:");
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Print the help message
     *
     * @param options The command line options the user gave
     * @param exitCode the system exit code to use
     */
    private static void printHelp(Options options, int exitCode) {
        // print the help message
        HelpFormatter myhelp = new HelpFormatter();
        myhelp.printHelp("MetadataExport\n", options);
        System.out.println("\nfull export: metadataexport -f filename");
        System.out.println("partial export: metadataexport -i handle -f filename");
        System.exit(exitCode);
    }

    /**
     * main method to run the metadata exporter
     *
     * @param argv the command line arguments given
     * @throws Exception if error occurs
     */
    public static void main(String[] argv) throws Exception {
        // Create an options object and populate it
        CommandLineParser parser = new PosixParser();

        Options options = new Options();

        options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)");
        options.addOption("f", "file", true, "destination where you want file written");
|
||||
options.addOption("a", "all", false,
|
||||
"include all metadata fields that are not normally changed (e.g. provenance)");
|
||||
options.addOption("h", "help", false, "help");
|
||||
|
||||
CommandLine line = null;
|
||||
|
||||
try {
|
||||
line = parser.parse(options, argv);
|
||||
} catch (ParseException pe) {
|
||||
System.err.println("Error with commands.");
|
||||
printHelp(options, 1);
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
if (line.hasOption('h')) {
|
||||
printHelp(options, 0);
|
||||
}
|
||||
|
||||
// Check a filename is given
|
||||
if (!line.hasOption('f')) {
|
||||
System.err.println("Required parameter -f missing!");
|
||||
printHelp(options, 1);
|
||||
}
|
||||
String filename = line.getOptionValue('f');
|
||||
|
||||
// Create a context
|
||||
Context c = new Context(Context.Mode.READ_ONLY);
|
||||
c.turnOffAuthorisationSystem();
|
||||
|
||||
// The things we'll export
|
||||
Iterator<Item> toExport = null;
|
||||
MetadataExport exporter = null;
|
||||
|
||||
// Export everything?
|
||||
boolean exportAll = line.hasOption('a');
|
||||
|
||||
ContentServiceFactory contentServiceFactory = ContentServiceFactory.getInstance();
|
||||
// Check we have an item OK
|
||||
ItemService itemService = contentServiceFactory.getItemService();
|
||||
if (!line.hasOption('i')) {
|
||||
System.out.println("Exporting whole repository WARNING: May take some time!");
|
||||
exporter = new MetadataExport(c, itemService.findAll(c), exportAll);
|
||||
} else {
|
||||
String handle = line.getOptionValue('i');
|
||||
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(c, handle);
|
||||
if (dso == null) {
|
||||
System.err.println("Item '" + handle + "' does not resolve to an item in your repository!");
|
||||
printHelp(options, 1);
|
||||
}
|
||||
|
||||
if (dso.getType() == Constants.ITEM) {
|
||||
System.out.println("Exporting item '" + dso.getName() + "' (" + handle + ")");
|
||||
List<Item> item = new ArrayList<>();
|
||||
item.add((Item) dso);
|
||||
exporter = new MetadataExport(c, item.iterator(), exportAll);
|
||||
} else if (dso.getType() == Constants.COLLECTION) {
|
||||
System.out.println("Exporting collection '" + dso.getName() + "' (" + handle + ")");
|
||||
Collection collection = (Collection) dso;
|
||||
toExport = itemService.findByCollection(c, collection);
|
||||
exporter = new MetadataExport(c, toExport, exportAll);
|
||||
} else if (dso.getType() == Constants.COMMUNITY) {
|
||||
System.out.println("Exporting community '" + dso.getName() + "' (" + handle + ")");
|
||||
exporter = new MetadataExport(c, (Community) dso, exportAll);
|
||||
DSpaceObject dso = null;
|
||||
if (StringUtils.isNotBlank(identifier)) {
|
||||
dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, identifier);
|
||||
if (dso == null) {
|
||||
dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(identifier));
|
||||
}
|
||||
} else {
|
||||
System.err.println("Error identifying '" + handle + "'");
|
||||
System.exit(1);
|
||||
dso = ContentServiceFactory.getInstance().getSiteService().findSite(context);
|
||||
}
|
||||
if (dso == null) {
|
||||
throw new ParseException("An identifier was given that wasn't able to be parsed to a DSpaceObject");
|
||||
}
|
||||
return dso.getID().toString() + ".csv";
|
||||
} catch (SQLException e) {
|
||||
handler.handleException("Something went wrong trying to retrieve DSO for identifier: " + identifier, e);
|
||||
}
|
||||
|
||||
// Perform the export
|
||||
DSpaceCSV csv = exporter.export();
|
||||
|
||||
// Save the files to the file
|
||||
csv.save(filename);
|
||||
|
||||
// Finish off and tidy up
|
||||
c.restoreAuthSystemState();
|
||||
c.complete();
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
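The hunks above replace the old self-contained main() with DSpace's script framework: option parsing moves into setup(), the work into internalRun(), and console output and file writing go through the injected handler instead of System.out and System.exit. A minimal sketch of the lifecycle this implies, assuming DSpaceRunnable exposes initialize(...) and run() as used by the script launcher; the argv and handler values below are illustrative and not part of this changeset:

    // Hedged sketch of driving the refactored export programmatically.
    // `handler` is any DSpaceRunnableHandler implementation (assumed available here).
    String[] argv = new String[] {"-i", "123456789/2", "-f", "export.csv"};
    MetadataExport export = new MetadataExportCli();
    export.initialize(argv, handler, null); // binds the arguments and the handler
    export.run();                           // runs setup(), then internalRun()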
@@ -0,0 +1,33 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.bulkedit;

import org.apache.commons.cli.ParseException;

public class MetadataExportCli extends MetadataExport {

    @Override
    protected String getFileNameForExportFile() {
        return commandLine.getOptionValue('f');
    }

    @Override
    public void setup() throws ParseException {
        super.setup();
        // Check a filename is given
        if (!commandLine.hasOption('f')) {
            throw new ParseException("Required parameter -f missing!");
        }
    }

    @Override
    protected void logHelpInfo() {
        handler.logInfo("\nfull export: metadata-export -f filename");
        handler.logInfo("partial export: metadata-export -i handle -f filename");
    }
}
@@ -0,0 +1,26 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.bulkedit;

import java.io.OutputStream;

import org.apache.commons.cli.Options;

public class MetadataExportCliScriptConfiguration extends MetadataExportScriptConfiguration<MetadataExportCli> {

    @Override
    public Options getOptions() {
        Options options = super.getOptions();
        options.addOption("f", "file", true, "destination where you want file written");
        options.getOption("f").setType(OutputStream.class);
        options.getOption("f").setRequired(true);
        super.options = options;
        return options;
    }
}
@@ -0,0 +1,70 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.bulkedit;

import java.sql.SQLException;

import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * The {@link ScriptConfiguration} for the {@link MetadataExport} script
 */
public class MetadataExportScriptConfiguration<T extends MetadataExport> extends ScriptConfiguration<T> {

    @Autowired
    private AuthorizeService authorizeService;

    private Class<T> dspaceRunnableClass;

    @Override
    public Class<T> getDspaceRunnableClass() {
        return dspaceRunnableClass;
    }

    /**
     * Generic setter for the dspaceRunnableClass
     * @param dspaceRunnableClass The dspaceRunnableClass to be set on this MetadataExportScriptConfiguration
     */
    @Override
    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
        this.dspaceRunnableClass = dspaceRunnableClass;
    }

    @Override
    public boolean isAllowedToExecute(Context context) {
        try {
            return authorizeService.isAdmin(context);
        } catch (SQLException e) {
            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
        }
    }

    @Override
    public Options getOptions() {
        if (options == null) {
            Options options = new Options();

            options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)");
            options.getOption("i").setType(String.class);
            options.addOption("a", "all", false,
                              "include all metadata fields that are not normally changed (e.g. provenance)");
            options.getOption("a").setType(boolean.class);
            options.addOption("h", "help", false, "help");
            options.getOption("h").setType(boolean.class);

            super.options = options;
        }
        return options;
    }
}
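Note the caching idiom shared by these configuration classes: getOptions() builds the Options once, stores them in the inherited options field through `super.options = options`, and every later call returns the cached field. A small illustrative sketch of that contract (the bare instantiation below is for demonstration only; in DSpace the bean would come from Spring with its AuthorizeService autowired):

    MetadataExportScriptConfiguration<MetadataExport> config = new MetadataExportScriptConfiguration<>();
    Options first = config.getOptions();   // builds the Options and caches them
    Options second = config.getOptions();  // returns the same cached instance
    assert first == second;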
File diff suppressed because it is too large
@@ -0,0 +1,68 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.bulkedit;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.UUID;

import org.apache.commons.cli.ParseException;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.scripts.handler.DSpaceRunnableHandler;

/**
 * CLI variant of the {@link MetadataImport} class, created so that the behaviour of the
 * determineChange method can be tailored to the CLI (interactive confirmation on the console)
 */
public class MetadataImportCLI extends MetadataImport {

    @Override
    protected boolean determineChange(DSpaceRunnableHandler handler) throws IOException {
        handler.logInfo("Do you want to make these changes? [y/n] ");
        try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(System.in))) {
            String yn = bufferedReader.readLine();
            if ("y".equalsIgnoreCase(yn)) {
                return true;
            }
            return false;
        }
    }

    @Override
    protected void assignCurrentUserInContext(Context context) throws ParseException {
        try {
            if (commandLine.hasOption('e')) {
                EPerson eperson;
                String e = commandLine.getOptionValue('e');
                if (e.indexOf('@') != -1) {
                    eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, e);
                } else {
                    eperson = EPersonServiceFactory.getInstance().getEPersonService().find(context, UUID.fromString(e));
                }

                if (eperson == null) {
                    throw new ParseException("Error, eperson cannot be found: " + e);
                }
                context.setCurrentUser(eperson);
            }
        } catch (Exception e) {
            throw new ParseException("Unable to find DSpace user: " + e.getMessage());
        }
    }

    @Override
    public void setup() throws ParseException {
        super.setup();
        if (!commandLine.hasOption('e')) {
            throw new ParseException("Required parameter -e missing!");
        }
    }
}
@@ -0,0 +1,27 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.bulkedit;

import org.apache.commons.cli.Options;
import org.dspace.scripts.configuration.ScriptConfiguration;

/**
 * The {@link ScriptConfiguration} for the {@link org.dspace.app.bulkedit.MetadataImportCLI} CLI script
 */
public class MetadataImportCliScriptConfiguration extends MetadataImportScriptConfiguration<MetadataImportCLI> {

    @Override
    public Options getOptions() {
        Options options = super.getOptions();
        options.addOption("e", "email", true, "email address or user id of user (required if adding new items)");
        options.getOption("e").setType(String.class);
        options.getOption("e").setRequired(true);
        super.options = options;
        return options;
    }
}
@@ -0,0 +1,81 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.bulkedit;

import java.io.InputStream;
import java.sql.SQLException;

import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * The {@link ScriptConfiguration} for the {@link MetadataImport} script
 */
public class MetadataImportScriptConfiguration<T extends MetadataImport> extends ScriptConfiguration<T> {

    @Autowired
    private AuthorizeService authorizeService;

    private Class<T> dspaceRunnableClass;

    @Override
    public Class<T> getDspaceRunnableClass() {
        return dspaceRunnableClass;
    }

    /**
     * Generic setter for the dspaceRunnableClass
     * @param dspaceRunnableClass The dspaceRunnableClass to be set on this MetadataImportScriptConfiguration
     */
    @Override
    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
        this.dspaceRunnableClass = dspaceRunnableClass;
    }

    @Override
    public boolean isAllowedToExecute(Context context) {
        try {
            return authorizeService.isAdmin(context);
        } catch (SQLException e) {
            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
        }
    }

    @Override
    public Options getOptions() {
        if (options == null) {
            Options options = new Options();

            options.addOption("f", "file", true, "source file");
            options.getOption("f").setType(InputStream.class);
            options.getOption("f").setRequired(true);
            options.addOption("s", "silent", false,
                              "silent operation - doesn't request confirmation of changes USE WITH CAUTION");
            options.getOption("s").setType(boolean.class);
            options.addOption("w", "workflow", false, "workflow - when adding new items, use collection workflow");
            options.getOption("w").setType(boolean.class);
            options.addOption("n", "notify", false,
                              "notify - when adding new items using a workflow, send notification emails");
            options.getOption("n").setType(boolean.class);
            options.addOption("v", "validate-only", false,
                              "validate - just validate the csv, don't run the import");
            options.getOption("v").setType(boolean.class);
            options.addOption("t", "template", false,
                              "template - when adding new items, use the collection template (if it exists)");
            options.getOption("t").setType(boolean.class);
            options.addOption("h", "help", false, "help");
            options.getOption("h").setType(boolean.class);

            super.options = options;
        }
        return options;
    }
}
@@ -17,13 +17,13 @@ import java.util.UUID;

 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.DefaultParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.PosixParser;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.dspace.checker.BitstreamDispatcher;
 import org.dspace.checker.CheckerCommand;
 import org.dspace.checker.HandleDispatcher;
@@ -48,7 +48,7 @@ import org.dspace.core.Utils;
  * @author Nathan Sarr
  */
 public final class ChecksumChecker {
-    private static final Logger LOG = Logger.getLogger(ChecksumChecker.class);
+    private static final Logger LOG = LogManager.getLogger(ChecksumChecker.class);

     private static final BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();

@@ -86,7 +86,7 @@ public final class ChecksumChecker {
  */
 public static void main(String[] args) throws SQLException {
     // set up command line parser
-    CommandLineParser parser = new PosixParser();
+    CommandLineParser parser = new DefaultParser();
     CommandLine line = null;

     // create an options object and populate it
@@ -101,19 +101,21 @@ public final class ChecksumChecker {
     options.addOption("a", "handle", true, "Specify a handle to check");
     options.addOption("v", "verbose", false, "Report all processing");

-    OptionBuilder.withArgName("bitstream-ids").hasArgs().withDescription(
-        "Space separated list of bitstream ids");
-    Option useBitstreamIds = OptionBuilder.create('b');
-
-    options.addOption(useBitstreamIds);
+    Option option;
+
+    option = Option.builder("b")
+                   .longOpt("bitstream-ids")
+                   .hasArgs()
+                   .desc("Space separated list of bitstream ids")
+                   .build();
+    options.addOption(option);

     options.addOption("p", "prune", false, "Prune configuration file");
-    options.addOption(OptionBuilder
-                          .withArgName("prune")
-                          .hasOptionalArgs(1)
-                          .withDescription(
-                              "Prune old results (optionally using specified properties file for configuration)")
-                          .create('p'));
+    option = Option.builder("p")
+                   .longOpt("prune")
+                   .optionalArg(true)
+                   .desc("Prune old results (optionally using specified properties file for configuration)")
+                   .build();
+    options.addOption(option);

     try {
         line = parser.parse(options, args);
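The hunk above is the commons-cli migration pattern repeated throughout this commit: the deprecated, statically stateful OptionBuilder gives way to the fluent Option.builder() API, and PosixParser to DefaultParser. A self-contained sketch of the new style (the class name and option here are illustrative, not part of the diff):

    import org.apache.commons.cli.*;

    public class OptionBuilderDemo {
        public static void main(String[] args) throws ParseException {
            Options options = new Options();
            options.addOption(Option.builder("b")
                    .longOpt("bitstream-ids")
                    .hasArgs() // accepts one or more values after -b
                    .desc("Space separated list of bitstream ids")
                    .build());
            CommandLine line = new DefaultParser().parse(options, args);
            if (line.hasOption('b')) {
                for (String id : line.getOptionValues('b')) {
                    System.out.println("bitstream id: " + id);
                }
            }
        }
    }

Unlike OptionBuilder, whose static fields made half-built options leak between calls, each Option.builder() chain is independent, which is why the converted code can safely reuse a single local `option` variable.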
@@ -15,9 +15,9 @@ import java.util.UUID;

 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.DefaultParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
-import org.apache.commons.cli.PosixParser;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.content.Collection;
 import org.dspace.content.DSpaceObject;
@@ -53,7 +53,7 @@ public class Harvest {

 public static void main(String[] argv) throws Exception {
     // create an options object and populate it
-    CommandLineParser parser = new PosixParser();
+    CommandLineParser parser = new DefaultParser();

     Options options = new Options();

@@ -272,9 +272,8 @@ public class Harvest {
         targetCollection = (Collection) dso;
     }
 } else {
-    // not a handle, try and treat it as an integer collection database ID
-    System.out.println("Looking up by id: " + collectionID + ", parsed as '" + Integer
-        .parseInt(collectionID) + "', " + "in context: " + context);
+    // not a handle, try and treat it as a collection database UUID
+    System.out.println("Looking up by UUID: " + collectionID + ", " + "in context: " + context);
     targetCollection = collectionService.find(context, UUID.fromString(collectionID));
 }
@@ -403,11 +402,7 @@ public class Harvest {
     context.setCurrentUser(eperson);
     harvester.runHarvest();
     context.complete();
-} catch (SQLException e) {
-    throw new IllegalStateException("Failed to run harvester", e);
-} catch (AuthorizeException e) {
-    throw new IllegalStateException("Failed to run harvester", e);
-} catch (IOException e) {
+} catch (SQLException | AuthorizeException | IOException e) {
     throw new IllegalStateException("Failed to run harvester", e);
 }

@@ -460,7 +455,7 @@ public class Harvest {
 List<String> errors;

 System.out.print("Testing basic PMH access: ");
-errors = OAIHarvester.verifyOAIharvester(server, set,
+errors = harvestedCollectionService.verifyOAIharvester(server, set,
     (null != metadataFormat) ? metadataFormat : "dc", false);
 if (errors.isEmpty()) {
     System.out.println("OK");
@@ -471,7 +466,7 @@ public class Harvest {
 }

 System.out.print("Testing ORE support: ");
-errors = OAIHarvester.verifyOAIharvester(server, set,
+errors = harvestedCollectionService.verifyOAIharvester(server, set,
     (null != metadataFormat) ? metadataFormat : "dc", true);
 if (errors.isEmpty()) {
     System.out.println("OK");
@@ -15,9 +15,9 @@ import java.util.UUID;

 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.DefaultParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
-import org.apache.commons.cli.PosixParser;
 import org.dspace.app.itemexport.factory.ItemExportServiceFactory;
 import org.dspace.app.itemexport.service.ItemExportService;
 import org.dspace.content.Collection;
@@ -69,7 +69,7 @@ public class ItemExportCLITool {
  */
 public static void main(String[] argv) throws Exception {
     // create an options object and populate it
-    CommandLineParser parser = new PosixParser();
+    CommandLineParser parser = new DefaultParser();

     Options options = new Options();

@@ -32,8 +32,8 @@ import java.util.zip.ZipEntry;
 import java.util.zip.ZipOutputStream;
 import javax.mail.MessagingException;

-import org.apache.commons.lang.StringUtils;
-import org.apache.log4j.Logger;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.logging.log4j.Logger;
 import org.dspace.app.itemexport.service.ItemExportService;
 import org.dspace.content.Bitstream;
 import org.dspace.content.Bundle;
@@ -42,12 +42,11 @@ import org.dspace.content.Community;
 import org.dspace.content.DSpaceObject;
 import org.dspace.content.Item;
 import org.dspace.content.MetadataField;
-import org.dspace.content.MetadataSchema;
+import org.dspace.content.MetadataSchemaEnum;
 import org.dspace.content.MetadataValue;
 import org.dspace.content.service.BitstreamService;
 import org.dspace.content.service.CommunityService;
 import org.dspace.content.service.ItemService;
-import org.dspace.core.ConfigurationManager;
 import org.dspace.core.Constants;
 import org.dspace.core.Context;
 import org.dspace.core.Email;
@@ -57,6 +56,7 @@ import org.dspace.core.Utils;
 import org.dspace.eperson.EPerson;
 import org.dspace.eperson.service.EPersonService;
 import org.dspace.handle.service.HandleService;
+import org.dspace.services.ConfigurationService;
 import org.springframework.beans.factory.annotation.Autowired;

 /**
@@ -93,12 +93,14 @@ public class ItemExportServiceImpl implements ItemExportService {
     protected ItemService itemService;
     @Autowired(required = true)
     protected HandleService handleService;
+    @Autowired(required = true)
+    protected ConfigurationService configurationService;


     /**
      * log4j logger
      */
-    private Logger log = Logger.getLogger(ItemExportServiceImpl.class);
+    private final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemExportServiceImpl.class);

     protected ItemExportServiceImpl() {

@@ -214,7 +216,7 @@ public class ItemExportServiceImpl implements ItemExportService {
     protected void writeMetadata(Context c, String schema, Item i,
                                  File destDir, boolean migrate) throws Exception {
         String filename;
-        if (schema.equals(MetadataSchema.DC_SCHEMA)) {
+        if (schema.equals(MetadataSchemaEnum.DC.getName())) {
             filename = "dublin_core.xml";
         } else {
             filename = "metadata_" + schema + ".xml";
@@ -271,9 +273,8 @@ public class ItemExportServiceImpl implements ItemExportService {
     ("date".equals(metadataField.getElement()) && "accessioned".equals(qualifier)) ||
     ("date".equals(metadataField.getElement()) && "available".equals(qualifier)) ||
     ("identifier".equals(metadataField.getElement()) && "uri".equals(qualifier) &&
-        (dcv.getValue() != null && dcv.getValue().startsWith("http://hdl.handle.net/" +
-                                                                 handleService
-                                                                     .getPrefix() + "/"))) ||
+        (dcv.getValue() != null && dcv.getValue().startsWith(
+            handleService.getCanonicalPrefix() + handleService.getPrefix() + "/"))) ||
     ("description".equals(metadataField.getElement()) && "provenance".equals(qualifier)) ||
     ("format".equals(metadataField.getElement()) && "extent".equals(qualifier)) ||
     ("format".equals(metadataField.getElement()) && "mimetype".equals(qualifier))))) {
@@ -547,7 +548,7 @@ public class ItemExportServiceImpl implements ItemExportService {
     List<Bitstream> bitstreams = bundle.getBitstreams();
     for (Bitstream bitstream : bitstreams) {
         // add up the size
-        size += bitstream.getSize();
+        size += bitstream.getSizeBytes();
     }
 }
 items.add(item.getID());
@@ -574,7 +575,7 @@ public class ItemExportServiceImpl implements ItemExportService {
     List<Bitstream> bitstreams = bundle.getBitstreams();
     for (Bitstream bitstream : bitstreams) {
         // add up the size
-        size += bitstream.getSize();
+        size += bitstream.getSizeBytes();
     }
 }
 items.add(item.getID());
@@ -593,7 +594,7 @@ public class ItemExportServiceImpl implements ItemExportService {
     List<Bitstream> bitstreams = bundle.getBitstreams();
     for (Bitstream bitstream : bitstreams) {
         // add up the size
-        size += bitstream.getSize();
+        size += bitstream.getSizeBytes();
     }
 }
 ArrayList<UUID> items = new ArrayList<>();
@@ -606,7 +607,7 @@ public class ItemExportServiceImpl implements ItemExportService {

 // check the size of all the bitstreams against the configuration file
 // entry if it exists
-String megaBytes = ConfigurationManager
+String megaBytes = configurationService
     .getProperty("org.dspace.app.itemexport.max.size");
 if (megaBytes != null) {
     float maxSize = 0;
@@ -731,7 +732,7 @@ public class ItemExportServiceImpl implements ItemExportService {
 @Override
 public String getExportDownloadDirectory(EPerson ePerson)
     throws Exception {
-    String downloadDir = ConfigurationManager
+    String downloadDir = configurationService
         .getProperty("org.dspace.app.itemexport.download.dir");
     if (downloadDir == null) {
         throw new Exception(
@@ -748,7 +749,7 @@ public class ItemExportServiceImpl implements ItemExportService {

 @Override
 public String getExportWorkDirectory() throws Exception {
-    String exportDir = ConfigurationManager
+    String exportDir = configurationService
         .getProperty("org.dspace.app.itemexport.work.dir");
     if (exportDir == null) {
         throw new Exception(
@@ -854,7 +855,7 @@ public class ItemExportServiceImpl implements ItemExportService {
     return null;
 }

-List<String> fileNames = new ArrayList<String>();
+List<String> fileNames = new ArrayList<>();

 for (String fileName : downloadDir.list()) {
     if (fileName.contains("export") && fileName.endsWith(".zip")) {
@@ -871,7 +872,7 @@ public class ItemExportServiceImpl implements ItemExportService {

 @Override
 public void deleteOldExportArchives(EPerson eperson) throws Exception {
-    int hours = ConfigurationManager
+    int hours = configurationService
         .getIntProperty("org.dspace.app.itemexport.life.span.hours");
     Calendar now = Calendar.getInstance();
     now.setTime(new Date());
@@ -892,11 +893,11 @@ public class ItemExportServiceImpl implements ItemExportService {

 @Override
 public void deleteOldExportArchives() throws Exception {
-    int hours = ConfigurationManager.getIntProperty("org.dspace.app.itemexport.life.span.hours");
+    int hours = configurationService.getIntProperty("org.dspace.app.itemexport.life.span.hours");
     Calendar now = Calendar.getInstance();
     now.setTime(new Date());
     now.add(Calendar.HOUR, (-hours));
-    File downloadDir = new File(ConfigurationManager.getProperty("org.dspace.app.itemexport.download.dir"));
+    File downloadDir = new File(configurationService.getProperty("org.dspace.app.itemexport.download.dir"));
     if (downloadDir.exists()) {
         // Get a list of all the sub-directories, potentially one for each ePerson.
         File[] dirs = downloadDir.listFiles();
@@ -930,8 +931,8 @@ public class ItemExportServiceImpl implements ItemExportService {
     Locale supportedLocale = I18nUtil.getEPersonLocale(eperson);
     Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "export_success"));
     email.addRecipient(eperson.getEmail());
-    email.addArgument(ConfigurationManager.getProperty("dspace.url") + "/exportdownload/" + fileName);
-    email.addArgument(ConfigurationManager.getProperty("org.dspace.app.itemexport.life.span.hours"));
+    email.addArgument(configurationService.getProperty("dspace.ui.url") + "/exportdownload/" + fileName);
+    email.addArgument(configurationService.getProperty("org.dspace.app.itemexport.life.span.hours"));

     email.send();
 } catch (Exception e) {
@@ -948,7 +949,7 @@ public class ItemExportServiceImpl implements ItemExportService {
     Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "export_error"));
     email.addRecipient(eperson.getEmail());
     email.addArgument(error);
-    email.addArgument(ConfigurationManager.getProperty("dspace.url") + "/feedback");
+    email.addArgument(configurationService.getProperty("dspace.ui.url") + "/feedback");

     email.send();
 } catch (Exception e) {
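Every ConfigurationManager call in this file becomes a call on the autowired ConfigurationService, and the mail links move from dspace.url to dspace.ui.url. A minimal sketch of the new access pattern, assuming a Spring-managed bean as in the class above (the reportConfig method name is illustrative):

    @Autowired(required = true)
    protected ConfigurationService configurationService;

    private void reportConfig() {
        // typed getter: no manual Integer.parseInt on a raw property string
        int hours = configurationService.getIntProperty("org.dspace.app.itemexport.life.span.hours");
        String feedback = configurationService.getProperty("dspace.ui.url") + "/feedback";
        log.info("export archives live {} hours; feedback at {}", hours, feedback);
    }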
@@ -1,106 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.itemimport;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import gr.ekt.bte.core.DataLoader;
import gr.ekt.bte.core.TransformationEngine;
import gr.ekt.bte.dataloader.FileDataLoader;


/**
 * This class acts as a Service in the procedure to batch import using the Biblio-Transformation-Engine
 */
public class BTEBatchImportService {

    TransformationEngine transformationEngine;
    Map<String, DataLoader> dataLoaders = new HashMap<String, DataLoader>();
    Map<String, String> outputMap = new HashMap<String, String>();

    /**
     * Default constructor
     */
    public BTEBatchImportService() {
        super();
    }

    /**
     * Setter method for dataLoaders parameter
     *
     * @param dataLoaders map of data loaders
     */
    public void setDataLoaders(Map<String, DataLoader> dataLoaders) {
        this.dataLoaders = dataLoaders;
    }

    /**
     * Get data loaders
     *
     * @return the map of DataLoaders
     */
    public Map<String, DataLoader> getDataLoaders() {
        return dataLoaders;
    }

    /**
     * Get output map
     *
     * @return the outputMapping
     */
    public Map<String, String> getOutputMap() {
        return outputMap;
    }

    /**
     * Setter method for the outputMapping
     *
     * @param outputMap the output mapping
     */
    public void setOutputMap(Map<String, String> outputMap) {
        this.outputMap = outputMap;
    }

    /**
     * Get transformation engine
     *
     * @return transformation engine
     */
    public TransformationEngine getTransformationEngine() {
        return transformationEngine;
    }

    /**
     * set transformation engine
     *
     * @param transformationEngine transformation engine
     */
    public void setTransformationEngine(TransformationEngine transformationEngine) {
        this.transformationEngine = transformationEngine;
    }

    /**
     * Getter of file data loaders
     *
     * @return List of file data loaders
     */
    public List<String> getFileDataLoaders() {
        List<String> result = new ArrayList<String>();

        for (String key : dataLoaders.keySet()) {
            DataLoader dl = dataLoaders.get(key);
            if (dl instanceof FileDataLoader) {
                result.add(key);
            }
        }
        return result;
    }
}
@@ -8,6 +8,7 @@
 package org.dspace.app.itemimport;

 import java.io.File;
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
@@ -15,9 +16,9 @@ import java.util.UUID;

 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.DefaultParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
-import org.apache.commons.cli.PosixParser;
 import org.dspace.app.itemimport.factory.ItemImportServiceFactory;
 import org.dspace.app.itemimport.service.ItemImportService;
 import org.dspace.content.Collection;
@@ -67,12 +68,11 @@ public class ItemImportCLITool {

 try {
     // create an options object and populate it
-    CommandLineParser parser = new PosixParser();
+    CommandLineParser parser = new DefaultParser();

     Options options = new Options();

     options.addOption("a", "add", false, "add items to DSpace");
-    options.addOption("b", "add-bte", false, "add items to DSpace via Biblio-Transformation-Engine (BTE)");
     options.addOption("r", "replace", false, "replace items in mapfile");
     options.addOption("d", "delete", false,
                       "delete items listed in mapfile");
@@ -387,8 +387,6 @@ public class ItemImportCLITool {
     myloader.replaceItems(c, mycollections, sourcedir, mapfile, template);
 } else if ("delete".equals(command)) {
     myloader.deleteItems(c, mapfile);
-} else if ("add-bte".equals(command)) {
-    myloader.addBTEItems(c, mycollections, sourcedir, mapfile, template, bteInputType, null);
 }

 // complete all transactions
@@ -408,7 +406,7 @@ public class ItemImportCLITool {
         "Deleting temporary zip directory: " + myloader.getTempWorkDirFile().getAbsolutePath());
     myloader.cleanupZipTemp();
 }
-} catch (Exception ex) {
+} catch (IOException ex) {
     System.out.println("Unable to delete temporary zip archive location: " + myloader.getTempWorkDirFile()
         .getAbsolutePath());
 }
@@ -45,20 +45,13 @@ import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.transform.TransformerException;

-import gr.ekt.bte.core.DataLoader;
-import gr.ekt.bte.core.TransformationEngine;
-import gr.ekt.bte.core.TransformationResult;
-import gr.ekt.bte.core.TransformationSpec;
-import gr.ekt.bte.dataloader.FileDataLoader;
-import gr.ekt.bteio.generators.DSpaceOutputGenerator;
-import gr.ekt.bteio.loaders.OAIPMHDataLoader;
-import org.apache.commons.collections.ComparatorUtils;
+import org.apache.commons.collections4.ComparatorUtils;
 import org.apache.commons.io.FileDeleteStrategy;
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang.RandomStringUtils;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.exception.ExceptionUtils;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.Logger;
 import org.apache.xpath.XPathAPI;
 import org.dspace.app.itemimport.service.ItemImportService;
 import org.dspace.app.util.LocalSchemaFilenameFilter;
@@ -74,6 +67,7 @@ import org.dspace.content.DSpaceObject;
 import org.dspace.content.Item;
 import org.dspace.content.MetadataField;
 import org.dspace.content.MetadataSchema;
+import org.dspace.content.MetadataSchemaEnum;
 import org.dspace.content.WorkspaceItem;
 import org.dspace.content.service.BitstreamFormatService;
 import org.dspace.content.service.BitstreamService;
@@ -84,7 +78,6 @@ import org.dspace.content.service.ItemService;
 import org.dspace.content.service.MetadataFieldService;
 import org.dspace.content.service.MetadataSchemaService;
 import org.dspace.content.service.WorkspaceItemService;
-import org.dspace.core.ConfigurationManager;
 import org.dspace.core.Constants;
 import org.dspace.core.Context;
 import org.dspace.core.Email;
@@ -95,7 +88,7 @@ import org.dspace.eperson.Group;
 import org.dspace.eperson.service.EPersonService;
 import org.dspace.eperson.service.GroupService;
 import org.dspace.handle.service.HandleService;
-import org.dspace.utils.DSpace;
+import org.dspace.services.ConfigurationService;
 import org.dspace.workflow.WorkflowItem;
 import org.dspace.workflow.WorkflowService;
 import org.springframework.beans.factory.InitializingBean;
@@ -124,7 +117,7 @@ import org.xml.sax.SAXException;
  * allow the registration of files (bitstreams) into DSpace.
  */
 public class ItemImportServiceImpl implements ItemImportService, InitializingBean {
-    private final Logger log = Logger.getLogger(ItemImportServiceImpl.class);
+    private final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemImportServiceImpl.class);

     @Autowired(required = true)
     protected AuthorizeService authorizeService;
@@ -156,8 +149,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
     protected WorkspaceItemService workspaceItemService;
     @Autowired(required = true)
     protected WorkflowService workflowService;
+    @Autowired(required = true)
+    protected ConfigurationService configurationService;

-    protected final String tempWorkDir = ConfigurationManager.getProperty("org.dspace.app.batchitemimport.work.dir");
+    protected String tempWorkDir;

     protected boolean isTest = false;
     protected boolean isResume = false;
@@ -167,6 +162,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea

     @Override
     public void afterPropertiesSet() throws Exception {
+        tempWorkDir = configurationService.getProperty("org.dspace.app.batchitemimport.work.dir");
         //Ensure tempWorkDir exists
         File tempWorkDirFile = new File(tempWorkDir);
         if (!tempWorkDirFile.exists()) {
@@ -196,100 +192,6 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
     }


-    /**
-     * In this method, the BTE is instantiated. THe workflow generates the DSpace files
-     * necessary for the upload, and the default item import method is called
-     *
-     * @param c             The contect
-     * @param mycollections The collections the items are inserted to
-     * @param sourceDir     The filepath to the file to read data from
-     * @param mapFile       The filepath to mapfile to be generated
-     * @param template      whether to use collection template item as starting point
-     * @param inputType     The type of the input data (bibtex, csv, etc.)
-     * @param workingDir    The path to create temporary files (for command line or UI based)
-     * @throws Exception if error occurs
-     */
-    @Override
-    public void addBTEItems(Context c, List<Collection> mycollections,
-                            String sourceDir, String mapFile, boolean template, String inputType, String workingDir)
-        throws Exception {
-        //Determine the folder where BTE will output the results
-        String outputFolder = null;
-        if (workingDir == null) { //This indicates a command line import, create a random path
-            File importDir = new File(ConfigurationManager.getProperty("org.dspace.app.batchitemimport.work.dir"));
-            if (!importDir.exists()) {
-                boolean success = importDir.mkdir();
-                if (!success) {
-                    log.info("Cannot create batch import directory!");
-                    throw new Exception("Cannot create batch import directory!");
-                }
-            }
-            //Get a random folder in case two admins batch import data at the same time
-            outputFolder = importDir + File.separator + generateRandomFilename(true);
-        } else { //This indicates a UI import, working dir is preconfigured
-            outputFolder = workingDir;
-        }
-
-        BTEBatchImportService dls = new DSpace().getSingletonService(BTEBatchImportService.class);
-        DataLoader dataLoader = dls.getDataLoaders().get(inputType);
-        Map<String, String> outputMap = dls.getOutputMap();
-        TransformationEngine te = dls.getTransformationEngine();
-
-        if (dataLoader == null) {
-            System.out.println(
-                "ERROR: The key used in -i parameter must match a valid DataLoader in the BTE Spring XML " +
-                    "configuration file!");
-            return;
-        }
-
-        if (outputMap == null) {
-            System.out.println(
-                "ERROR: The key used in -i parameter must match a valid outputMapping in the BTE Spring XML " +
-                    "configuration file!");
-            return;
-        }
-
-        if (dataLoader instanceof FileDataLoader) {
-            FileDataLoader fdl = (FileDataLoader) dataLoader;
-            if (!StringUtils.isBlank(sourceDir)) {
-                System.out.println(
-                    "INFO: Dataloader will load data from the file specified in the command prompt (and not from the " +
-                        "Spring XML configuration file)");
-                fdl.setFilename(sourceDir);
-            }
-        } else if (dataLoader instanceof OAIPMHDataLoader) {
-            OAIPMHDataLoader fdl = (OAIPMHDataLoader) dataLoader;
-            System.out.println(sourceDir);
-            if (!StringUtils.isBlank(sourceDir)) {
-                System.out.println(
-                    "INFO: Dataloader will load data from the address specified in the command prompt (and not from " +
-                        "the Spring XML configuration file)");
-                fdl.setServerAddress(sourceDir);
-            }
-        }
-        if (dataLoader != null) {
-            System.out.println("INFO: Dataloader " + dataLoader.toString() + " will be used for the import!");
-
-            te.setDataLoader(dataLoader);
-
-            DSpaceOutputGenerator outputGenerator = new DSpaceOutputGenerator(outputMap);
-            outputGenerator.setOutputDirectory(outputFolder);
-
-            te.setOutputGenerator(outputGenerator);
-
-            try {
-                TransformationResult res = te.transform(new TransformationSpec());
-                List<String> output = res.getOutput();
-                outputGenerator.writeOutput(output);
-            } catch (Exception e) {
-                System.err.println("Exception");
-                e.printStackTrace();
-                throw e;
-            }
-            addItems(c, mycollections, outputFolder, mapFile, template);
-        }
-    }
-
     @Override
     public void addItemsAtomic(Context c, List<Collection> mycollections, String sourceDir, String mapFile,
                                boolean template) throws Exception {
@@ -677,7 +579,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
     Node schemaAttr = metadata.item(0).getAttributes().getNamedItem(
         "schema");
     if (schemaAttr == null) {
-        schema = MetadataSchema.DC_SCHEMA;
+        schema = MetadataSchemaEnum.DC.getName();
     } else {
         schema = schemaAttr.getNodeValue();
     }
@@ -1480,7 +1382,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea

     File tempdir = new File(destinationDir);
     if (!tempdir.isDirectory()) {
-        log.error("'" + ConfigurationManager.getProperty("org.dspace.app.itemexport.work.dir") +
+        log.error("'" + configurationService.getProperty("org.dspace.app.itemexport.work.dir") +
                       "' as defined by the key 'org.dspace.app.itemexport.work.dir' in dspace.cfg " +
                       "is not a valid directory");
     }
@@ -1505,47 +1407,54 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
         log.error("Unable to create contents directory: " + zipDir + entry.getName());
     }
 } else {
-    System.out.println("Extracting file: " + entry.getName());
-    log.info("Extracting file: " + entry.getName());
+    String entryName = entry.getName();
+    File outFile = new File(zipDir + entryName);
+    // Verify that this file will be extracted into our zipDir (and not somewhere else!)
+    if (!outFile.toPath().normalize().startsWith(zipDir)) {
+        throw new IOException("Bad zip entry: '" + entryName
+                                  + "' in file '" + zipfile.getAbsolutePath() + "'!"
+                                  + " Cannot process this file.");
+    } else {
+        System.out.println("Extracting file: " + entryName);
+        log.info("Extracting file: " + entryName);

-    int index = entry.getName().lastIndexOf('/');
-    if (index == -1) {
-        // Was it created on Windows instead?
-        index = entry.getName().lastIndexOf('\\');
-    }
-    if (index > 0) {
-        File dir = new File(zipDir + entry.getName().substring(0, index));
-        if (!dir.exists() && !dir.mkdirs()) {
-            log.error("Unable to create directory: " + dir.getAbsolutePath());
-        }
+        int index = entryName.lastIndexOf('/');
+        if (index == -1) {
+            // Was it created on Windows instead?
+            index = entryName.lastIndexOf('\\');
+        }
+        if (index > 0) {
+            File dir = new File(zipDir + entryName.substring(0, index));
+            if (!dir.exists() && !dir.mkdirs()) {
+                log.error("Unable to create directory: " + dir.getAbsolutePath());
+            }

-        //Entries could have too many directories, and we need to adjust the sourcedir
-        // file1.zip (SimpleArchiveFormat / item1 / contents|dublin_core|...
-        //            SimpleArchiveFormat / item2 / contents|dublin_core|...
-        // or
-        // file2.zip (item1 / contents|dublin_core|...
-        //            item2 / contents|dublin_core|...
+            //Entries could have too many directories, and we need to adjust the sourcedir
+            // file1.zip (SimpleArchiveFormat / item1 / contents|dublin_core|...
+            //            SimpleArchiveFormat / item2 / contents|dublin_core|...
+            // or
+            // file2.zip (item1 / contents|dublin_core|...
+            //            item2 / contents|dublin_core|...

-        //regex supports either windows or *nix file paths
-        String[] entryChunks = entry.getName().split("/|\\\\");
-        if (entryChunks.length > 2) {
-            if (StringUtils.equals(sourceDirForZip, sourcedir)) {
-                sourceDirForZip = sourcedir + "/" + entryChunks[0];
-            }
-        }
-    }
+            //regex supports either windows or *nix file paths
+            String[] entryChunks = entryName.split("/|\\\\");
+            if (entryChunks.length > 2) {
+                if (StringUtils.equals(sourceDirForZip, sourcedir)) {
+                    sourceDirForZip = sourcedir + "/" + entryChunks[0];
+                }
+            }
+        }

-    byte[] buffer = new byte[1024];
-    int len;
-    InputStream in = zf.getInputStream(entry);
-    BufferedOutputStream out = new BufferedOutputStream(
-        new FileOutputStream(zipDir + entry.getName()));
-    while ((len = in.read(buffer)) >= 0) {
-        out.write(buffer, 0, len);
-    }
-    in.close();
-    out.close();
+        byte[] buffer = new byte[1024];
+        int len;
+        InputStream in = zf.getInputStream(entry);
+        BufferedOutputStream out = new BufferedOutputStream(
+            new FileOutputStream(outFile));
+        while ((len = in.read(buffer)) >= 0) {
+            out.write(buffer, 0, len);
+        }
+        in.close();
+        out.close();
+    }
 }
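The rewritten extraction loop above adds a "zip slip" guard: the destination path is normalised and checked against the extraction directory before any bytes are written, so entries such as ../../etc/passwd are rejected. A standalone sketch of the check (class name and paths are illustrative):

    import java.io.File;
    import java.io.IOException;

    public class ZipSlipDemo {
        static File safeDestination(File zipDir, String entryName) throws IOException {
            File outFile = new File(zipDir, entryName);
            // normalize() collapses any ../ segments before the containment test
            if (!outFile.toPath().normalize().startsWith(zipDir.toPath().normalize())) {
                throw new IOException("Bad zip entry: '" + entryName + "'");
            }
            return outFile;
        }

        public static void main(String[] args) throws IOException {
            File dir = new File("/tmp/unzip");
            System.out.println(safeDestination(dir, "item1/dublin_core.xml")); // accepted
            System.out.println(safeDestination(dir, "../../etc/passwd"));      // throws IOException
        }
    }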
@@ -1638,7 +1547,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
     }
 }

-importDir = ConfigurationManager.getProperty(
+importDir = configurationService.getProperty(
     "org.dspace.app.batchitemimport.work.dir") + File.separator + "batchuploads" + File.separator
     + context
     .getCurrentUser()
@@ -1728,9 +1637,6 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
 if (theInputType.equals("saf") || theInputType
     .equals("safupload")) { //In case of Simple Archive Format import
     addItems(context, finalCollections, dataDir, mapFilePath, template);
-} else { // For all other imports (via BTE)
-    addBTEItems(context, finalCollections, theFilePath, mapFilePath, useTemplateItem, theInputType,
-                dataDir);
 }

 // email message letting user know the file is ready for
@@ -1796,7 +1702,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
     Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "bte_batch_import_error"));
     email.addRecipient(eperson.getEmail());
     email.addArgument(error);
-    email.addArgument(ConfigurationManager.getProperty("dspace.url") + "/feedback");
+    email.addArgument(configurationService.getProperty("dspace.ui.url") + "/feedback");

     email.send();
 } catch (Exception e) {
@@ -1834,7 +1740,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
 @Override
 public String getImportUploadableDirectory(EPerson ePerson)
     throws Exception {
-    String uploadDir = ConfigurationManager.getProperty("org.dspace.app.batchitemimport.work.dir");
+    String uploadDir = configurationService.getProperty("org.dspace.app.batchitemimport.work.dir");
     if (uploadDir == null) {
         throw new Exception(
             "A dspace.cfg entry for 'org.dspace.app.batchitemimport.work.dir' does not exist.");
@@ -183,21 +183,6 @@ public interface ItemImportService {
  */
 public void deleteItems(Context c, String mapfile) throws Exception;

-/**
- * Add items
- *
- * @param c             DSpace Context
- * @param mycollections List of Collections
- * @param sourcedir     source directory
- * @param mapfile       map file
- * @param template      whether to use template item
- * @param bteInputType  The input type of the data (bibtex, csv, etc.), in case of local file
- * @param workingDir    working directory
- * @throws Exception if error
- */
-public void addBTEItems(Context c, List<Collection> mycollections, String sourcedir, String mapfile,
-                        boolean template, String bteInputType, String workingDir) throws Exception;
-
 /**
  * Get temporary work directory
  *
@@ -7,6 +7,7 @@
*/
package org.dspace.app.itemupdate;

import java.lang.reflect.InvocationTargetException;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

@@ -20,22 +21,25 @@ import java.util.Map;
public class ActionManager implements Iterable<UpdateAction> {

protected Map<Class<? extends UpdateAction>, UpdateAction> registry
= new LinkedHashMap<Class<? extends UpdateAction>, UpdateAction>();
= new LinkedHashMap<>();

/**
* Get update action
* Get update action.
*
* @param actionClass UpdateAction class
* @return instantiation of UpdateAction class
* @throws InstantiationException if instantiation error
* @throws IllegalAccessException if illegal access error
* @throws NoSuchMethodException passed through.
* @throws InvocationTargetException passed through.
*/
public UpdateAction getUpdateAction(Class<? extends UpdateAction> actionClass)
throws InstantiationException, IllegalAccessException {
throws InstantiationException, IllegalAccessException,
NoSuchMethodException, IllegalArgumentException, InvocationTargetException {
UpdateAction action = registry.get(actionClass);

if (action == null) {
action = actionClass.newInstance();
action = actionClass.getDeclaredConstructor().newInstance();
registry.put(actionClass, action);
}

@@ -58,7 +62,8 @@ public class ActionManager implements Iterable<UpdateAction> {
@Override
public Iterator<UpdateAction> iterator() {
return new Iterator<UpdateAction>() {
private Iterator<Class<? extends UpdateAction>> itr = registry.keySet().iterator();
private final Iterator<Class<? extends UpdateAction>> itr
= registry.keySet().iterator();

@Override
public boolean hasNext() {
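The newInstance() change above recurs several times in this diff. A self-contained sketch of why the signature grows two exceptions: Class.newInstance() is deprecated because it propagates any checked exception the constructor throws, while getDeclaredConstructor().newInstance() wraps constructor failures in InvocationTargetException (the class names below are illustrative only):

    import java.lang.reflect.InvocationTargetException;

    public class ReflectiveCreateSketch {
        static <T> T create(Class<T> type)
                throws InstantiationException, IllegalAccessException,
                       NoSuchMethodException, InvocationTargetException {
            // The replacement call requires a visible no-arg constructor,
            // exactly like the registry instantiation in ActionManager above.
            return type.getDeclaredConstructor().newInstance();
        }

        public static void main(String[] args) throws Exception {
            StringBuilder sb = create(StringBuilder.class);
            System.out.println(sb.append("created reflectively"));
        }
    }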
@@ -30,7 +30,7 @@ import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;

import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.dspace.app.util.LocalSchemaFilenameFilter;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;

@@ -47,7 +47,7 @@ import org.w3c.dom.Document;
* Encapsulates the Item in the context of the DSpace Archive Format
*/
public class ItemArchive {
private static final Logger log = Logger.getLogger(ItemArchive.class);
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemArchive.class);

public static final String DUBLIN_CORE_XML = "dublin_core.xml";
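The Logger swap above (org.apache.log4j to org.apache.logging.log4j) is applied to many classes in this diff. A hedged sketch of the Log4j 2 idiom, assuming only log4j-api on the classpath:

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class LoggingSketch {
        private static final Logger log = LogManager.getLogger(LoggingSketch.class);

        public static void main(String[] args) {
            // {} placeholders defer string concatenation until the level is enabled.
            log.info("processing item {}", "123456789/42");
            // Supplier arguments, as used by PDFFilter later in this diff,
            // are evaluated only when the message is actually logged.
            log.debug("expensive detail: {}", () -> Integer.toBinaryString(1 << 20));
        }
    }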
@@ -24,18 +24,19 @@ import java.util.UUID;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;

/**
* Provides some batch editing capabilities for items in DSpace:

@@ -78,6 +79,7 @@ public class ItemUpdate {

protected static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService();
protected static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
protected static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();

static {
filterAliases.put("ORIGINAL", "org.dspace.app.itemupdate.OriginalBitstreamFilter");

@@ -107,7 +109,7 @@ public class ItemUpdate {

// instance variables
protected ActionManager actionMgr = new ActionManager();
protected List<String> undoActionList = new ArrayList<String>();
protected List<String> undoActionList = new ArrayList<>();
protected String eperson;

/**

@@ -115,7 +117,7 @@ public class ItemUpdate {
*/
public static void main(String[] argv) {
// create an options object and populate it
CommandLineParser parser = new PosixParser();
CommandLineParser parser = new DefaultParser();

Options options = new Options();
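PosixParser is replaced by DefaultParser here and in several other CLI tools below; PosixParser is deprecated in commons-cli 1.3+, and DefaultParser accepts both short and long option styles. A minimal sketch (the option shown is illustrative, not ItemUpdate's full option set):

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.CommandLineParser;
    import org.apache.commons.cli.DefaultParser;
    import org.apache.commons.cli.Options;

    public class ParserSketch {
        public static void main(String[] args) throws Exception {
            Options options = new Options();
            options.addOption("e", "eperson", true, "email address of the acting eperson");

            CommandLineParser parser = new DefaultParser();
            CommandLine line = parser.parse(options, args);
            if (line.hasOption('e')) {
                System.out.println("eperson: " + line.getOptionValue('e'));
            }
        }
    }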
@@ -273,7 +275,8 @@ public class ItemUpdate {
Class<?> cfilter = Class.forName(filterClassname);
pr("BitstreamFilter class to instantiate: " + cfilter.toString());

filter = (BitstreamFilter) cfilter.newInstance(); //unfortunate cast, an erasure consequence
filter = (BitstreamFilter) cfilter.getDeclaredConstructor()
.newInstance(); //unfortunate cast, an erasure consequence
} catch (Exception e) {
pr("Error: Failure instantiating bitstream filter class: " + filterClassname);
System.exit(1);

@@ -330,10 +333,7 @@ public class ItemUpdate {
iu.setEPerson(context, iu.eperson);
context.turnOffAuthorisationSystem();

HANDLE_PREFIX = ConfigurationManager.getProperty("handle.canonical.prefix");
if (HANDLE_PREFIX == null || HANDLE_PREFIX.length() == 0) {
HANDLE_PREFIX = "http://hdl.handle.net/";
}
HANDLE_PREFIX = handleService.getCanonicalPrefix();

iu.processArchive(context, sourcedir, itemField, metadataIndexName, alterProvenance, isTest);
@@ -28,17 +28,18 @@ import javax.xml.transform.TransformerException;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;

@@ -189,7 +190,7 @@ public class MetadataUtilities {
NodeList metadata = XPathAPI.selectNodeList(document, "/dublin_core");
Node schemaAttr = metadata.item(0).getAttributes().getNamedItem("schema");
if (schemaAttr == null) {
schema = MetadataSchema.DC_SCHEMA;
schema = MetadataSchemaEnum.DC.getName();
} else {
schema = schemaAttr.getNodeValue();
}

@@ -225,7 +226,9 @@ public class MetadataUtilities {
if (language == null) {
language = "en";
} else if ("".equals(language)) {
language = ConfigurationManager.getProperty("default.language");
language = DSpaceServicesFactory.getInstance()
.getConfigurationService()
.getProperty("default.language");
}

DtoMetadata dtom = DtoMetadata.create(schema, element, qualifier, language, value);
@@ -10,9 +10,22 @@ package org.dspace.app.launcher;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.Method;
import java.sql.SQLException;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.TreeMap;

import org.apache.commons.cli.ParseException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.core.Context;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.dspace.scripts.factory.ScriptServiceFactory;
import org.dspace.scripts.handler.DSpaceRunnableHandler;
import org.dspace.scripts.handler.impl.CommandLineDSpaceRunnableHandler;
import org.dspace.scripts.service.ScriptService;
import org.dspace.servicemanager.DSpaceKernelImpl;
import org.dspace.servicemanager.DSpaceKernelInit;
import org.dspace.services.RequestService;

@@ -27,6 +40,9 @@ import org.jdom.input.SAXBuilder;
* @author Mark Diggory
*/
public class ScriptLauncher {

private static final Logger log = LogManager.getLogger();

/**
* The service manager kernel
*/

@@ -35,7 +51,8 @@ public class ScriptLauncher {
/**
* Default constructor
*/
private ScriptLauncher() { }
private ScriptLauncher() {
}

/**
* Execute the DSpace script launcher

@@ -45,7 +62,7 @@ public class ScriptLauncher {
* @throws FileNotFoundException if file doesn't exist
*/
public static void main(String[] args)
throws FileNotFoundException, IOException {
throws FileNotFoundException, IOException, IllegalAccessException, InstantiationException {
// Initialise the service manager kernel
try {
kernelImpl = DSpaceKernelInit.getKernel(null);
@@ -76,8 +93,9 @@ public class ScriptLauncher {
}

// Look up command in the configuration, and execute.
int status;
status = runOneCommand(commandConfigs, args);

CommandLineDSpaceRunnableHandler commandLineDSpaceRunnableHandler = new CommandLineDSpaceRunnableHandler();
int status = handleScript(args, commandConfigs, commandLineDSpaceRunnableHandler, kernelImpl);

// Destroy the service kernel if it is still alive
if (kernelImpl != null) {

@@ -86,6 +104,55 @@ public class ScriptLauncher {
}

System.exit(status);

}

/**
* This method takes the arguments from a command-line input, finds the script that the first argument
* refers to, and executes that script.
* It returns 1 if the script failed and 0 if it passed.
* @param args The arguments for the script, with the script name as the first element of the array
* @param commandConfigs The launcher.xml command configuration as a Document
* @param dSpaceRunnableHandler The DSpaceRunnableHandler for this execution
* @param kernelImpl The relevant DSpaceKernelImpl
* @return 1 if the script failed, 0 if it passed
*/
public static int handleScript(String[] args, Document commandConfigs,
DSpaceRunnableHandler dSpaceRunnableHandler,
DSpaceKernelImpl kernelImpl) throws InstantiationException, IllegalAccessException {
int status;
ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService();
ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(args[0]);
DSpaceRunnable script = null;
if (scriptConfiguration != null) {
script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration);
}
if (script != null) {
status = executeScript(args, dSpaceRunnableHandler, script);
} else {
status = runOneCommand(commandConfigs, args, kernelImpl);
}
return status;
}

/**
* This method will simply execute the script
* @param args The arguments of the script, with the script name as the first element of the array
* @param dSpaceRunnableHandler The relevant DSpaceRunnableHandler
* @param script The script to be executed
* @return 1 if the script failed, 0 if it passed
*/
private static int executeScript(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler,
DSpaceRunnable script) {
try {
script.initialize(args, dSpaceRunnableHandler, null);
script.run();
return 0;
} catch (ParseException e) {
script.printHelp();
e.printStackTrace();
return 1;
}
}

protected static int runOneCommand(Document commandConfigs, String[] args) {
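A simplified, hypothetical skeleton of the dispatch order that handleScript introduces: consult the script service first, then fall back to the launcher.xml command table. The real method also threads a DSpaceRunnableHandler and the kernel through; the registries below are stand-ins:

    import java.util.HashMap;
    import java.util.Map;

    public class DispatchSketch {
        // Stand-ins for the Spring-configured script service and launcher.xml.
        static final Map<String, Runnable> SCRIPT_SERVICE = new HashMap<>();
        static final Map<String, Runnable> LAUNCHER_COMMANDS = new HashMap<>();

        static int dispatch(String name) {
            Runnable script = SCRIPT_SERVICE.get(name);      // script service first
            if (script == null) {
                script = LAUNCHER_COMMANDS.get(name);        // launcher.xml fallback
            }
            if (script == null) {
                System.err.println("Unknown command: " + name);
                return 1;
            }
            script.run();
            return 0;
        }

        public static void main(String[] args) {
            LAUNCHER_COMMANDS.put("cleanup", () -> System.out.println("legacy cleanup"));
            SCRIPT_SERVICE.put("metadata-import", () -> System.out.println("runnable script"));
            System.exit(dispatch(args.length > 0 ? args[0] : "metadata-import"));
        }
    }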
@@ -98,7 +165,7 @@ public class ScriptLauncher {
* @param commandConfigs Document
* @param args the command line arguments given
*/
public static int runOneCommand(Document commandConfigs, String[] args, DSpaceKernelImpl kernelImpl) {
protected static int runOneCommand(Document commandConfigs, String[] args, DSpaceKernelImpl kernelImpl) {
String request = args[0];
Element root = commandConfigs.getRootElement();
List<Element> commands = root.getChildren("command");

@@ -255,11 +322,53 @@ public class ScriptLauncher {
}

/**
* Display the commands that the current launcher config file knows about
*
* Display the commands that are defined in launcher.xml and/or the script service.
* @param commandConfigs configs as Document
*/
private static void display(Document commandConfigs) {
// usage
System.out.println("Usage: dspace [command-name] {parameters}");

// commands from launcher.xml
Collection<Element> launcherCommands = getLauncherCommands(commandConfigs);
if (launcherCommands.size() > 0) {
System.out.println("\nCommands from launcher.xml");
for (Element command : launcherCommands) {
displayCommand(
command.getChild("name").getValue(),
command.getChild("description").getValue()
);
}
}

// commands from script service
Collection<ScriptConfiguration> serviceCommands = getServiceCommands();
if (serviceCommands.size() > 0) {
System.out.println("\nCommands from script service");
for (ScriptConfiguration command : serviceCommands) {
displayCommand(
command.getName(),
command.getDescription()
);
}
}
}

/**
* Display a single command using a fixed format. Used by {@link #display}.
* @param name the name that can be used to invoke the command
* @param description the description of the command
*/
private static void displayCommand(String name, String description) {
System.out.format(" - %s: %s\n", name, description);
}

/**
* Get a sorted collection of the commands that are specified in launcher.xml. Used by {@link #display}.
* @param commandConfigs the contents of launcher.xml
* @return sorted collection of commands
*/
private static Collection<Element> getLauncherCommands(Document commandConfigs) {
// List all command elements
List<Element> commands = commandConfigs.getRootElement().getChildren("command");

@@ -271,11 +380,32 @@ public class ScriptLauncher {
sortedCommands.put(command.getChild("name").getValue(), command);
}

// Display the sorted list
System.out.println("Usage: dspace [command-name] {parameters}");
for (Element command : sortedCommands.values()) {
System.out.println(" - " + command.getChild("name").getValue() +
": " + command.getChild("description").getValue());
}
return sortedCommands.values();
}

/**
* Get a sorted collection of the commands that are defined as beans. Used by {@link #display}.
* @return sorted collection of commands
*/
private static Collection<ScriptConfiguration> getServiceCommands() {
ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService();

Context throwAwayContext = new Context();

throwAwayContext.turnOffAuthorisationSystem();
List<ScriptConfiguration> scriptConfigurations = scriptService.getScriptConfigurations(throwAwayContext);
throwAwayContext.restoreAuthSystemState();

try {
throwAwayContext.complete();
} catch (SQLException exception) {
exception.printStackTrace();
throwAwayContext.abort();
}

scriptConfigurations.sort(Comparator.comparing(ScriptConfiguration::getName));

return scriptConfigurations;
}

}
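Both helper methods above sort their commands by name: a TreeMap keyed on the command name for the launcher.xml elements, and Comparator.comparing for the script configurations. A tiny stand-alone illustration of the two idioms (the command names and the Script class are placeholders):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Comparator;
    import java.util.List;
    import java.util.SortedMap;
    import java.util.TreeMap;

    public class SortSketch {
        static class Script {
            final String name;
            Script(String name) { this.name = name; }
            String getName() { return name; }
        }

        public static void main(String[] args) {
            // TreeMap iterates in key order, mirroring getLauncherCommands.
            SortedMap<String, String> byName = new TreeMap<>();
            byName.put("packager", "imports/exports packages");
            byName.put("cleanup", "removes deleted bitstreams");
            byName.forEach((name, desc) -> System.out.format(" - %s: %s%n", name, desc));

            // Comparator.comparing on an extracted key, mirroring getServiceCommands.
            List<Script> scripts = new ArrayList<>(
                Arrays.asList(new Script("metadata-import"), new Script("curate")));
            scripts.sort(Comparator.comparing(Script::getName));
            scripts.forEach(s -> System.out.println(s.getName()));
        }
    }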
@@ -12,7 +12,8 @@ import java.io.InputStream;
import javax.imageio.ImageIO;

import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;

/**
* Filter image bitstreams, scaling the image to be within the bounds of

@@ -66,17 +67,19 @@ public class BrandedPreviewJPEGFilter extends MediaFilter {
BufferedImage buf = ImageIO.read(source);

// get config params
float xmax = (float) ConfigurationManager
ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
float xmax = (float) configurationService
.getIntProperty("webui.preview.maxwidth");
float ymax = (float) ConfigurationManager
float ymax = (float) configurationService
.getIntProperty("webui.preview.maxheight");
boolean blurring = (boolean) ConfigurationManager
boolean blurring = (boolean) configurationService
.getBooleanProperty("webui.preview.blurring");
boolean hqscaling = (boolean) ConfigurationManager
boolean hqscaling = (boolean) configurationService
.getBooleanProperty("webui.preview.hqscaling");
int brandHeight = ConfigurationManager.getIntProperty("webui.preview.brand.height");
String brandFont = ConfigurationManager.getProperty("webui.preview.brand.font");
int brandFontPoint = ConfigurationManager.getIntProperty("webui.preview.brand.fontpoint");
int brandHeight = configurationService.getIntProperty("webui.preview.brand.height");
String brandFont = configurationService.getProperty("webui.preview.brand.font");
int brandFontPoint = configurationService.getIntProperty("webui.preview.brand.fontpoint");

JPEGFilter jpegFilter = new JPEGFilter();
return jpegFilter
@@ -11,7 +11,7 @@ import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.IOUtils;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.hssf.extractor.ExcelExtractor;

@@ -36,7 +36,7 @@ import org.dspace.content.Item;
*/
public class ExcelFilter extends MediaFilter {

private static Logger log = Logger.getLogger(ExcelFilter.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(ExcelFilter.class);

public String getFilteredName(String oldFilename) {
return oldFilename + ".txt";
@@ -19,8 +19,9 @@ import org.dspace.content.Bundle;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.im4java.core.ConvertCmd;
import org.im4java.core.IM4JavaException;
import org.im4java.core.IMOperation;

@@ -33,36 +34,18 @@ import org.im4java.process.ProcessStarter;
* no bigger than. Creates only JPEGs.
*/
public abstract class ImageMagickThumbnailFilter extends MediaFilter {
protected static int width = 180;
protected static int height = 120;
private static boolean flatten = true;
static String bitstreamDescription = "IM Thumbnail";
static final String defaultPattern = "Generated Thumbnail";
static Pattern replaceRegex = Pattern.compile(defaultPattern);
private static final int DEFAULT_WIDTH = 180;
private static final int DEFAULT_HEIGHT = 120;
static final String DEFAULT_PATTERN = "Generated Thumbnail";
protected final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
protected static final ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();

static String cmyk_profile;
static String srgb_profile;
protected static final String PRE = ImageMagickThumbnailFilter.class.getName();

static {
String pre = ImageMagickThumbnailFilter.class.getName();
String s = ConfigurationManager.getProperty(pre + ".ProcessStarter");
String s = configurationService.getProperty(PRE + ".ProcessStarter");
ProcessStarter.setGlobalSearchPath(s);
width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width);
height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height);
flatten = ConfigurationManager.getBooleanProperty(pre + ".flatten", flatten);
String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription");
cmyk_profile = ConfigurationManager.getProperty(pre + ".cmyk_profile");
srgb_profile = ConfigurationManager.getProperty(pre + ".srgb_profile");
if (description != null) {
bitstreamDescription = description;
}
try {
String patt = ConfigurationManager.getProperty(pre + ".replaceRegex");
replaceRegex = Pattern.compile(patt == null ? defaultPattern : patt);
} catch (PatternSyntaxException e) {
System.err.println("Invalid thumbnail replacement pattern: " + e.getMessage());
}
}

public ImageMagickThumbnailFilter() {
@@ -94,7 +77,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
*/
@Override
public String getDescription() {
return bitstreamDescription;
return configurationService.getProperty(PRE + ".bitstreamDescription", "IM Thumbnail");
}

public File inputStreamToTempFile(InputStream source, String prefix, String suffix) throws IOException {

@@ -120,7 +103,8 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
IMOperation op = new IMOperation();
op.autoOrient();
op.addImage(f.getAbsolutePath());
op.thumbnail(width, height);
op.thumbnail(configurationService.getIntProperty("thumbnail.maxwidth", DEFAULT_WIDTH),
configurationService.getIntProperty("thumbnail.maxheight", DEFAULT_HEIGHT));
op.addImage(f2.getAbsolutePath());
if (verbose) {
System.out.println("IM Thumbnail Param: " + op);

@@ -137,13 +121,16 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
IMOperation op = new IMOperation();
String s = "[" + page + "]";
op.addImage(f.getAbsolutePath() + s);
if (flatten) {
if (configurationService.getBooleanProperty(PRE + ".flatten", true)) {
op.flatten();
}

// PDFs using the CMYK color system can be handled specially if
// profiles are defined
String cmyk_profile = configurationService.getProperty(PRE + ".cmyk_profile");
String srgb_profile = configurationService.getProperty(PRE + ".srgb_profile");
if (cmyk_profile != null && srgb_profile != null) {
Info imageInfo = new Info(f.getAbsolutePath(), true);
Info imageInfo = new Info(f.getAbsolutePath() + s, true);
String imageClass = imageInfo.getImageClass();
if (imageClass.contains("CMYK")) {
op.profile(cmyk_profile);

@@ -174,24 +161,32 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
String description = bit.getDescription();
// If anything other than a generated thumbnail
// is found, halt processing
Pattern replaceRegex;
try {
String patt = configurationService.getProperty(PRE + ".replaceRegex", DEFAULT_PATTERN);
replaceRegex = Pattern.compile(patt == null ? DEFAULT_PATTERN : patt);
} catch (PatternSyntaxException e) {
System.err.println("Invalid thumbnail replacement pattern: " + e.getMessage());
throw e;
}
if (description != null) {
if (replaceRegex.matcher(description).matches()) {
if (verbose) {
System.out.println(description + " " + nsrc
+ " matches pattern and is replacable.");
System.out.format("%s %s matches pattern and is replaceable.%n",
description, nsrc);
}
continue;
}
if (description.equals(bitstreamDescription)) {
if (description.equals(getDescription())) {
if (verbose) {
System.out.println(bitstreamDescription + " " + nsrc
+ " is replacable.");
System.out.format("%s %s is replaceable.%n",
getDescription(), nsrc);
}
continue;
}
}
System.out.println("Custom Thumbnail exists for " + nsrc + " for item "
+ item.getHandle() + ". Thumbnail will not be generated. ");
System.out.format("Custom Thumbnail exists for %s for item %s. Thumbnail will not be generated.%n",
nsrc, item.getHandle());
return false;
}
}
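A hedged sketch of the im4java pipeline this filter drives, assuming ImageMagick's convert is on the configured search path; the file paths are placeholders and the dimensions are the DEFAULT_WIDTH/DEFAULT_HEIGHT values from above:

    import org.im4java.core.ConvertCmd;
    import org.im4java.core.IMOperation;

    public class ThumbnailSketch {
        public static void main(String[] args) throws Exception {
            IMOperation op = new IMOperation();
            op.autoOrient();
            op.addImage("/tmp/input.pdf[0]"); // hypothetical input; [0] selects page one
            op.thumbnail(180, 120);           // thumbnail.maxwidth x thumbnail.maxheight
            op.addImage("/tmp/output.jpg");   // hypothetical output path

            // ConvertCmd shells out to ImageMagick's `convert` binary.
            new ConvertCmd().run(op);
        }
    }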
@@ -22,7 +22,8 @@ import java.io.InputStream;
import javax.imageio.ImageIO;

import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;

/**
* Filter image bitstreams, scaling the image to be within the bounds of

@@ -80,13 +81,15 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
public InputStream getThumb(Item currentItem, BufferedImage buf, boolean verbose)
throws Exception {
// get config params
float xmax = (float) ConfigurationManager
final ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
float xmax = (float) configurationService
.getIntProperty("thumbnail.maxwidth");
float ymax = (float) ConfigurationManager
float ymax = (float) configurationService
.getIntProperty("thumbnail.maxheight");
boolean blurring = (boolean) ConfigurationManager
boolean blurring = (boolean) configurationService
.getBooleanProperty("thumbnail.blurring");
boolean hqscaling = (boolean) ConfigurationManager
boolean hqscaling = (boolean) configurationService
.getBooleanProperty("thumbnail.hqscaling");

return getThumbDim(currentItem, buf, verbose, xmax, ymax, blurring, hqscaling, 0, 0, null);

@@ -169,9 +172,11 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
g2d.drawImage(buf, 0, 0, (int) xsize, (int) ysize, null);

if (brandHeight != 0) {
ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
Brand brand = new Brand((int) xsize, brandHeight, new Font(brandFont, Font.PLAIN, brandFontPoint), 5);
BufferedImage brandImage = brand.create(ConfigurationManager.getProperty("webui.preview.brand"),
ConfigurationManager.getProperty("webui.preview.brand.abbrev"),
BufferedImage brandImage = brand.create(configurationService.getProperty("webui.preview.brand"),
configurationService.getProperty("webui.preview.brand.abbrev"),
currentItem == null ? "" : "hdl:" + currentItem.getHandle());

g2d.drawImage(brandImage, (int) 0, (int) ysize, (int) xsize, (int) 20, null);
@@ -16,13 +16,12 @@ import java.util.Map;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.MissingArgumentException;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory;
import org.dspace.app.mediafilter.service.MediaFilterService;
import org.dspace.content.Collection;

@@ -66,7 +65,7 @@ public class MediaFilterCLITool {
System.setProperty("java.awt.headless", "true");

// create an options object and populate it
CommandLineParser parser = new PosixParser();
CommandLineParser parser = new DefaultParser();

int status = 0;

@@ -85,26 +84,30 @@ public class MediaFilterCLITool {
options.addOption("h", "help", false, "help");

//create a "plugin" option (to specify specific MediaFilter plugins to run)
OptionBuilder.withLongOpt("plugins");
OptionBuilder.withValueSeparator(',');
OptionBuilder.withDescription(
"ONLY run the specified Media Filter plugin(s)\n" +
"listed from '" + MEDIA_FILTER_PLUGINS_KEY + "' in dspace.cfg.\n" +
"Separate multiple with a comma (,)\n" +
"(e.g. MediaFilterManager -p \n\"Word Text Extractor\",\"PDF Text Extractor\")");
Option pluginOption = OptionBuilder.create('p');
pluginOption.setArgs(Option.UNLIMITED_VALUES); //unlimited number of args
Option pluginOption = Option.builder("p")
.longOpt("plugins")
.hasArg()
.hasArgs()
.valueSeparator(',')
.desc(
"ONLY run the specified Media Filter plugin(s)\n" +
"listed from '" + MEDIA_FILTER_PLUGINS_KEY + "' in dspace.cfg.\n" +
"Separate multiple with a comma (,)\n" +
"(e.g. MediaFilterManager -p \n\"Word Text Extractor\",\"PDF Text Extractor\")")
.build();
options.addOption(pluginOption);

//create a "skip" option (to specify communities/collections/items to skip)
OptionBuilder.withLongOpt("skip");
OptionBuilder.withValueSeparator(',');
OptionBuilder.withDescription(
"SKIP the bitstreams belonging to identifier\n" +
"Separate multiple identifiers with a comma (,)\n" +
"(e.g. MediaFilterManager -s \n 123456789/34,123456789/323)");
Option skipOption = OptionBuilder.create('s');
skipOption.setArgs(Option.UNLIMITED_VALUES); //unlimited number of args
Option skipOption = Option.builder("s")
.longOpt("skip")
.hasArg()
.hasArgs()
.valueSeparator(',')
.desc(
"SKIP the bitstreams belonging to identifier\n" +
"Separate multiple identifiers with a comma (,)\n" +
"(e.g. MediaFilterManager -s \n 123456789/34,123456789/323)")
.build();
options.addOption(skipOption);

boolean isVerbose = false;
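The deprecated static OptionBuilder above is replaced with the fluent Option.builder API (commons-cli 1.3+), which is safer because each builder is an independent object rather than shared static state. A minimal sketch; note that the chained hasArg() in the hunk is redundant once hasArgs() allows unlimited values:

    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.Options;

    public class OptionBuilderSketch {
        public static void main(String[] args) {
            Option plugins = Option.builder("p")
                .longOpt("plugins")
                .hasArgs()              // unlimited values, like Option.UNLIMITED_VALUES
                .valueSeparator(',')    // split "a,b,c" into separate values
                .desc("ONLY run the specified Media Filter plugin(s)")
                .build();

            Options options = new Options();
            options.addOption(plugins);
            System.out.println(options.getOption("p").getLongOpt());
        }
    }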
@@ -179,7 +182,7 @@ public class MediaFilterCLITool {
mediaFilterService.setMax2Process(max2Process);

//initialize an array of our enabled filters
List<FormatFilter> filterList = new ArrayList<FormatFilter>();
List<FormatFilter> filterList = new ArrayList<>();

//set up each filter
for (int i = 0; i < filterNames.length; i++) {
@@ -220,7 +220,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingBean
} catch (Exception e) {
String handle = myItem.getHandle();
List<Bundle> bundles = myBitstream.getBundles();
long size = myBitstream.getSize();
long size = myBitstream.getSizeBytes();
String checksum = myBitstream.getChecksum() + " (" + myBitstream.getChecksumAlgorithm() + ")";
int assetstore = myBitstream.getStoreNumber();

@@ -310,12 +310,11 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingBean
// get bitstream filename, calculate destination filename
String newName = formatFilter.getFilteredName(source.getName());

Bitstream existingBitstream = null; // is there an existing rendition?
Bundle targetBundle = null; // bundle we're modifying

// check if destination bitstream exists
Bundle existingBundle = null;
Bitstream existingBitstream = null;
List<Bundle> bundles = itemService.getBundles(item, formatFilter.getBundleName());

// check if destination bitstream exists
if (bundles.size() > 0) {
// only finds the last match (FIXME?)
for (Bundle bundle : bundles) {

@@ -323,7 +322,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingBean

for (Bitstream bitstream : bitstreams) {
if (bitstream.getName().trim().equals(newName.trim())) {
targetBundle = bundle;
existingBundle = bundle;
existingBitstream = bitstream;
}
}
@@ -345,63 +344,71 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingBean
+ " (item: " + item.getHandle() + ")");
}

InputStream destStream;
try {
System.out.println("File: " + newName);
destStream = formatFilter.getDestinationStream(item, bitstreamService.retrieve(context, source), isVerbose);
System.out.println("File: " + newName);

// start filtering of the bitstream, using try with resource to close all InputStreams properly
try (
// get the source stream
InputStream srcStream = bitstreamService.retrieve(context, source);
// filter the source stream to produce the destination stream
// this is the hard work, check for OutOfMemoryErrors at the end of the try clause.
InputStream destStream = formatFilter.getDestinationStream(item, srcStream, isVerbose);
) {
if (destStream == null) {
if (!isQuiet) {
System.out.println("SKIPPED: bitstream " + source.getID()
+ " (item: " + item.getHandle() + ") because filtering was unsuccessful");
+ " (item: " + item.getHandle() + ") because filtering was unsuccessful");
}

return false;
}

Bundle targetBundle; // bundle we're modifying
if (bundles.size() < 1) {
// create new bundle if needed
targetBundle = bundleService.create(context, item, formatFilter.getBundleName());
} else {
// take the first match as we already looked out for the correct bundle name
targetBundle = bundles.get(0);
}

// create bitstream to store the filter result
Bitstream b = bitstreamService.create(context, targetBundle, destStream);
// set the name, source and description of the bitstream
b.setName(context, newName);
b.setSource(context, "Written by FormatFilter " + formatFilter.getClass().getName() +
" on " + DCDate.getCurrent() + " (GMT).");
b.setDescription(context, formatFilter.getDescription());
// Set the format of the bitstream
BitstreamFormat bf = bitstreamFormatService.findByShortDescription(context,
formatFilter.getFormatString());
bitstreamService.setFormat(context, b, bf);
bitstreamService.update(context, b);

//Set permissions on the derivative bitstream
//- First remove any existing policies
authorizeService.removeAllPolicies(context, b);

//- Determine if this is a public-derivative format
if (publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) {
//- Set derivative bitstream to be publicly accessible
Group anonymous = groupService.findByName(context, Group.ANONYMOUS);
authorizeService.addPolicy(context, b, Constants.READ, anonymous);
} else {
//- Inherit policies from the source bitstream
authorizeService.inheritPolicies(context, source, b);
}

//do post-processing of the generated bitstream
formatFilter.postProcessBitstream(context, item, b);

} catch (OutOfMemoryError oome) {
System.out.println("!!! OutOfMemoryError !!!");
return false;
}

// create new bundle if needed
if (bundles.size() < 1) {
targetBundle = bundleService.create(context, item, formatFilter.getBundleName());
} else {
// take the first match
targetBundle = bundles.get(0);
}

Bitstream b = bitstreamService.create(context, targetBundle, destStream);

// Now set the format and name of the bitstream
b.setName(context, newName);
b.setSource(context, "Written by FormatFilter " + formatFilter.getClass().getName() +
" on " + DCDate.getCurrent() + " (GMT).");
b.setDescription(context, formatFilter.getDescription());

// Find the proper format
BitstreamFormat bf = bitstreamFormatService.findByShortDescription(context,
formatFilter.getFormatString());
bitstreamService.setFormat(context, b, bf);
bitstreamService.update(context, b);

//Set permissions on the derivative bitstream
//- First remove any existing policies
authorizeService.removeAllPolicies(context, b);

//- Determine if this is a public-derivative format
if (publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) {
//- Set derivative bitstream to be publicly accessible
Group anonymous = groupService.findByName(context, Group.ANONYMOUS);
authorizeService.addPolicy(context, b, Constants.READ, anonymous);
} else {
//- Inherit policies from the source bitstream
authorizeService.inheritPolicies(context, source, b);
}

// fixme - set date?
// we are overwriting, so remove old bitstream
if (existingBitstream != null) {
bundleService.removeBitstream(context, targetBundle, existingBitstream);
bundleService.removeBitstream(context, existingBundle, existingBitstream);
}

if (!isQuiet) {

@@ -409,9 +416,6 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingBean
+ " (item: " + item.getHandle() + ") and created '" + newName + "'");
}

//do post-processing of the generated bitstream
formatFilter.postProcessBitstream(context, item, b);

return true;
}
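The central change in this hunk is moving the source and destination streams into a try-with-resources list so both are closed even when filtering throws. A minimal stand-alone sketch of the pattern (byte-array streams stand in for the bitstream and the filter output):

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;

    public class TryWithResourcesSketch {
        public static void main(String[] args) throws Exception {
            byte[] data = "source bytes".getBytes();
            try (
                // Resources are closed in reverse declaration order: dest, then src.
                InputStream src = new ByteArrayInputStream(data);
                InputStream dest = new ByteArrayInputStream(src.readAllBytes())
            ) {
                System.out.println("filtered " + dest.available() + " bytes");
            }
        }
    }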
@@ -10,7 +10,9 @@ package org.dspace.app.mediafilter;
import java.awt.image.BufferedImage;
import java.io.InputStream;

import org.apache.logging.log4j.Logger;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.encryption.InvalidPasswordException;
import org.apache.pdfbox.rendering.PDFRenderer;
import org.dspace.content.Item;

@@ -25,6 +27,8 @@ import org.dspace.content.Item;
* @author Jason Sherman jsherman@usao.edu
*/
public class PDFBoxThumbnail extends MediaFilter {
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PDFBoxThumbnail.class);

@Override
public String getFilteredName(String oldFilename) {
return oldFilename + ".jpg";

@@ -64,12 +68,18 @@ public class PDFBoxThumbnail extends MediaFilter {
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
PDDocument doc = PDDocument.load(source);
PDFRenderer renderer = new PDFRenderer(doc);
BufferedImage buf = renderer.renderImage(0);
// ImageIO.write(buf, "PNG", new File("custom-render.png"));
doc.close();
BufferedImage buf;

// Render the page image.
try ( PDDocument doc = PDDocument.load(source); ) {
PDFRenderer renderer = new PDFRenderer(doc);
buf = renderer.renderImage(0);
} catch (InvalidPasswordException ex) {
log.error("PDF is encrypted. Cannot create thumbnail (item: {})", currentItem::getHandle);
return null;
}

// Generate thumbnail derivative and return as IO stream.
JPEGFilter jpegFilter = new JPEGFilter();
return jpegFilter.getThumb(currentItem, buf, verbose);
}
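A hedged sketch of the PDFBox change above, assuming pdfbox 2.x: the document is opened in try-with-resources so it is closed even when rendering throws, and an encrypted PDF is reported and skipped rather than aborting the filter run. The file handling below is illustrative:

    import java.awt.image.BufferedImage;
    import java.io.File;

    import org.apache.pdfbox.pdmodel.PDDocument;
    import org.apache.pdfbox.pdmodel.encryption.InvalidPasswordException;
    import org.apache.pdfbox.rendering.PDFRenderer;

    public class RenderFirstPageSketch {
        static BufferedImage firstPage(File pdf) throws Exception {
            try (PDDocument doc = PDDocument.load(pdf)) {
                // Render page 0 to an in-memory image, as the filter does.
                return new PDFRenderer(doc).renderImage(0);
            } catch (InvalidPasswordException ex) {
                System.err.println("PDF is encrypted, skipping: " + pdf);
                return null;
            }
        }

        public static void main(String[] args) throws Exception {
            BufferedImage img = firstPage(new File(args[0]));
            System.out.println(img == null ? "skipped" : img.getWidth() + "x" + img.getHeight());
        }
    }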
@@ -16,11 +16,13 @@ import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;

import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.encryption.InvalidPasswordException;
import org.apache.pdfbox.text.PDFTextStripper;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;

/*
*

@@ -30,7 +32,7 @@ import org.dspace.core.ConfigurationManager;
*/
public class PDFFilter extends MediaFilter {

private static Logger log = Logger.getLogger(PDFFilter.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PDFFilter.class);

@Override
public String getFilteredName(String oldFilename) {

@@ -71,8 +73,10 @@ public class PDFFilter extends MediaFilter {
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
try {
boolean useTemporaryFile = ConfigurationManager.getBooleanProperty("pdffilter.largepdfs", false);
boolean useTemporaryFile = configurationService.getBooleanProperty("pdffilter.largepdfs", false);

// get input stream from bitstream
// pass to filter, get string back

@@ -95,6 +99,10 @@ public class PDFFilter extends MediaFilter {
try {
pdfDoc = PDDocument.load(source);
pts.writeText(pdfDoc, writer);
} catch (InvalidPasswordException ex) {
log.error("PDF is encrypted. Cannot extract text (item: {})",
() -> currentItem.getHandle());
return null;
} finally {
try {
if (pdfDoc != null) {

@@ -119,7 +127,7 @@ public class PDFFilter extends MediaFilter {
}
} catch (OutOfMemoryError oome) {
log.error("Error parsing PDF document " + oome.getMessage(), oome);
if (!ConfigurationManager.getBooleanProperty("pdffilter.skiponmemoryexception", false)) {
if (!configurationService.getBooleanProperty("pdffilter.skiponmemoryexception", false)) {
throw oome;
}
}
@@ -10,7 +10,7 @@ package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.InputStream;

import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.hslf.extractor.PowerPointExtractor;

@@ -23,7 +23,7 @@ import org.dspace.content.Item;
*/
public class PowerPointFilter extends MediaFilter {

private static Logger log = Logger.getLogger(PowerPointFilter.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PowerPointFilter.class);

@Override
public String getFilteredName(String oldFilename) {
@@ -1,93 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.log4j.Logger;
import org.dspace.content.Item;
import org.textmining.extraction.TextExtractor;
import org.textmining.extraction.word.WordTextExtractorFactory;

/*
*
* to do: helpful error messages - can't find mediafilter.cfg - can't
* instantiate filter - bitstream format doesn't exist.
*
*/
public class WordFilter extends MediaFilter {

private static Logger log = Logger.getLogger(WordFilter.class);

@Override
public String getFilteredName(String oldFilename) {
return oldFilename + ".txt";
}

/**
* @return String bundle name
*/
@Override
public String getBundleName() {
return "TEXT";
}

/**
* @return String bitstreamformat
*/
@Override
public String getFormatString() {
return "Text";
}

/**
* @return String description
*/
@Override
public String getDescription() {
return "Extracted text";
}

/**
* @param currentItem item
* @param source source input stream
* @param verbose verbose mode
* @return InputStream the resulting input stream
* @throws Exception if error
*/
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
// get input stream from bitstream
// pass to filter, get string back
try {
WordTextExtractorFactory factory = new WordTextExtractorFactory();
TextExtractor e = factory.textExtractor(source);
String extractedText = e.getText();

// if verbose flag is set, print out extracted text
// to STDOUT
if (verbose) {
System.out.println(extractedText);
}

// generate an input stream with the extracted text
byte[] textBytes = extractedText.getBytes();
ByteArrayInputStream bais = new ByteArrayInputStream(textBytes);

return bais; // will this work? or will the byte array be out of scope?
} catch (IOException ioe) {
System.out.println("Invalid Word Format");
log.error("Error detected - Word File format not recognized: "
+ ioe.getMessage(), ioe);
throw ioe;
}
}
}
@@ -17,9 +17,9 @@ import java.util.List;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.crosswalk.CrosswalkException;

@@ -175,7 +175,7 @@ public class Packager {
"flag can be used if you want to save (pipe) a report of all changes to a file, and " +
"therefore need to bypass all user interaction.");

CommandLineParser parser = new PosixParser();
CommandLineParser parser = new DefaultParser();
CommandLine line = parser.parse(options, argv);

String sourceFile = null;
@@ -78,7 +78,7 @@ public class RequestItem implements ReloadableEntity<Integer> {
private Date request_date = null;

@Column(name = "accept_request")
private Boolean accept_request = null;
private boolean accept_request;

/**
* Protected constructor, create object using:

@@ -88,6 +88,7 @@ public class RequestItem implements ReloadableEntity<Integer> {
protected RequestItem() {
}

@Override
public Integer getID() {
return requestitem_id;
}
@@ -19,6 +19,15 @@ import org.dspace.core.Context;
* @author Andrea Bollini
*/
public interface RequestItemAuthorExtractor {
public RequestItemAuthor getRequestItemAuthor(Context context, Item item)
throws SQLException;

/**
* Retrieve the author to contact for a copy request of the given item.
*
* @param context DSpace context object
* @param item item to request
* @return An object containing the name and email address to send the request to,
* or null if no valid email address was found.
* @throws SQLException if database error
*/
public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException;
}
@@ -9,14 +9,14 @@ package org.dspace.app.requestitem;

import java.sql.SQLException;

import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.springframework.beans.factory.annotation.Autowired;

/**

@@ -30,9 +30,6 @@ import org.springframework.beans.factory.annotation.Autowired;
* @author Peter Dietz
*/
public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {

private Logger log = Logger.getLogger(RequestItemHelpdeskStrategy.class);

@Autowired(required = true)
protected EPersonService ePersonService;

@@ -41,9 +38,11 @@ public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {

@Override
public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException {
boolean helpdeskOverridesSubmitter = ConfigurationManager
ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
boolean helpdeskOverridesSubmitter = configurationService
.getBooleanProperty("request.item.helpdesk.override", false);
String helpDeskEmail = ConfigurationManager.getProperty("mail.helpdesk");
String helpDeskEmail = configurationService.getProperty("mail.helpdesk");

if (helpdeskOverridesSubmitter && StringUtils.isNotBlank(helpDeskEmail)) {
return getHelpDeskPerson(context, helpDeskEmail);

@@ -64,18 +63,16 @@ public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
* @throws SQLException if database error
*/
public RequestItemAuthor getHelpDeskPerson(Context context, String helpDeskEmail) throws SQLException {
EPerson helpdeskEPerson = null;

context.turnOffAuthorisationSystem();
helpdeskEPerson = ePersonService.findByEmail(context, helpDeskEmail);
EPerson helpdeskEPerson = ePersonService.findByEmail(context, helpDeskEmail);
context.restoreAuthSystemState();

if (helpdeskEPerson != null) {
return new RequestItemAuthor(helpdeskEPerson);
} else {
String helpdeskName = I18nUtil.getMessage(
"org.dspace.app.requestitem.RequestItemHelpdeskStrategy.helpdeskname",
context);
"org.dspace.app.requestitem.RequestItemHelpdeskStrategy.helpdeskname",
context);
return new RequestItemAuthor(helpdeskName, helpDeskEmail);
}
}
@@ -10,12 +10,13 @@ package org.dspace.app.requestitem;
import java.sql.SQLException;
import java.util.List;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.springframework.beans.factory.annotation.Autowired;

/**

@@ -38,6 +39,7 @@ public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy {
@Override
public RequestItemAuthor getRequestItemAuthor(Context context, Item item)
throws SQLException {
RequestItemAuthor author = null;
if (emailMetadata != null) {
List<MetadataValue> vals = itemService.getMetadataByMetadataString(item, emailMetadata);
if (vals.size() > 0) {

@@ -49,19 +51,38 @@ public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy {
fullname = nameVals.iterator().next().getValue();
}
}

if (StringUtils.isBlank(fullname)) {
fullname = I18nUtil
.getMessage(
"org.dspace.app.requestitem.RequestItemMetadataStrategy.unnamed",
context);
.getMessage(
"org.dspace.app.requestitem.RequestItemMetadataStrategy.unnamed",
context);
}
RequestItemAuthor author = new RequestItemAuthor(
fullname, email);
author = new RequestItemAuthor(fullname, email);
return author;
}
} else {
// Uses the basic strategy to look for the original submitter
author = super.getRequestItemAuthor(context, item);
// If the author or their email is null, fall back to the help desk or admin name and email
if (null == author || null == author.getEmail()) {
String email = null;
String name = null;
//First get help desk name and email
email = DSpaceServicesFactory.getInstance()
.getConfigurationService().getProperty("mail.helpdesk");
name = DSpaceServicesFactory.getInstance()
.getConfigurationService().getProperty("mail.helpdesk.name");
// If help desk mail is null get the mail and name of admin
if (email == null) {
email = DSpaceServicesFactory.getInstance()
.getConfigurationService().getProperty("mail.admin");
name = DSpaceServicesFactory.getInstance()
.getConfigurationService().getProperty("mail.admin.name");
}
author = new RequestItemAuthor(name, email);
}
}
return super.getRequestItemAuthor(context, item);
return author;
}

public void setEmailMetadata(String emailMetadata) {
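The new else-branch implements a contact-address fallback chain: submitter, then mail.helpdesk, then mail.admin. A small sketch of the chain with stand-in values (only the property keys are taken from the hunk; the helper is hypothetical):

    public class FallbackSketch {
        static String firstNonNull(String... candidates) {
            for (String candidate : candidates) {
                if (candidate != null) {
                    return candidate;
                }
            }
            return null;
        }

        public static void main(String[] args) {
            String submitterEmail = null;               // submitter lookup came back empty
            String helpdeskEmail = null;                // stand-in for mail.helpdesk
            String adminEmail = "admin@example.org";    // stand-in for mail.admin
            System.out.println("contact: "
                + firstNonNull(submitterEmail, helpdeskEmail, adminEmail));
        }
    }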
@@ -10,7 +10,7 @@ package org.dspace.app.requestitem;
import java.sql.SQLException;
import java.util.Date;

import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.dspace.app.requestitem.dao.RequestItemDAO;
import org.dspace.app.requestitem.service.RequestItemService;
import org.dspace.content.Bitstream;

@@ -28,7 +28,7 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
public class RequestItemServiceImpl implements RequestItemService {

private final Logger log = Logger.getLogger(RequestItemServiceImpl.class);
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(RequestItemServiceImpl.class);

@Autowired(required = true)
protected RequestItemDAO requestItemDAO;
@@ -23,13 +23,22 @@ public class RequestItemSubmitterStrategy implements RequestItemAuthorExtractor
public RequestItemSubmitterStrategy() {
}

/**
* Returns the submitter of an Item as RequestItemAuthor or null if the
* Submitter is deleted.
*
* @return The submitter of the item or null if the submitter is deleted
* @throws SQLException if database error
*/
@Override
public RequestItemAuthor getRequestItemAuthor(Context context, Item item)
throws SQLException {
EPerson submitter = item.getSubmitter();
RequestItemAuthor author = new RequestItemAuthor(
submitter.getFullName(), submitter.getEmail());
RequestItemAuthor author = null;
if (null != submitter) {
author = new RequestItemAuthor(
submitter.getFullName(), submitter.getEmail());
}
return author;
}

}
@@ -8,13 +8,15 @@
|
||||
package org.dspace.app.requestitem.dao.impl;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import javax.persistence.criteria.CriteriaBuilder;
|
||||
import javax.persistence.criteria.CriteriaQuery;
|
||||
import javax.persistence.criteria.Root;
|
||||
|
||||
import org.dspace.app.requestitem.RequestItem;
|
||||
import org.dspace.app.requestitem.RequestItem_;
|
||||
import org.dspace.app.requestitem.dao.RequestItemDAO;
|
||||
import org.dspace.core.AbstractHibernateDAO;
|
||||
import org.dspace.core.Context;
|
||||
import org.hibernate.Criteria;
|
||||
import org.hibernate.criterion.Restrictions;
|
||||
|
||||
/**
|
||||
* Hibernate implementation of the Database Access Object interface class for the RequestItem object.
|
||||
@@ -30,9 +32,12 @@ public class RequestItemDAOImpl extends AbstractHibernateDAO<RequestItem> implem
|
||||
|
||||
@Override
|
||||
public RequestItem findByToken(Context context, String token) throws SQLException {
|
||||
Criteria criteria = createCriteria(context, RequestItem.class);
|
||||
criteria.add(Restrictions.eq("token", token));
|
||||
return uniqueResult(criteria);
|
||||
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
|
||||
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, RequestItem.class);
|
||||
Root<RequestItem> requestItemRoot = criteriaQuery.from(RequestItem.class);
|
||||
criteriaQuery.select(requestItemRoot);
|
||||
criteriaQuery.where(criteriaBuilder.equal(requestItemRoot.get(RequestItem_.token), token));
|
||||
return uniqueResult(context, criteriaQuery, false, RequestItem.class, -1, -1);
|
||||
}
|
||||
|
||||
|
||||
|
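The findByToken rewrite above is the recurring migration in this branch from the deprecated Hibernate Criteria API to JPA criteria queries. The generic shape of the new idiom, sketched with the helper methods AbstractHibernateDAO provides in the diff:

    CriteriaBuilder cb = getCriteriaBuilder(context);
    CriteriaQuery<RequestItem> query = getCriteriaQuery(cb, RequestItem.class);
    Root<RequestItem> root = query.from(RequestItem.class);
    // Type-safe field reference via the RequestItem_ static metamodel
    query.select(root).where(cb.equal(root.get(RequestItem_.token), token));
    return uniqueResult(context, query, false, RequestItem.class, -1, -1);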
@@ -15,8 +15,8 @@ import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.sfx.service.SFXFileReaderService;
|
||||
import org.dspace.content.DCPersonName;
|
||||
import org.dspace.content.Item;
|
||||
@@ -58,7 +58,7 @@ public class SFXFileReaderServiceImpl implements SFXFileReaderService {
|
||||
/**
|
||||
* log4j logger
|
||||
*/
|
||||
private final Logger log = Logger.getLogger(SFXFileReaderServiceImpl.class);
|
||||
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(SFXFileReaderServiceImpl.class);
|
||||
|
||||
protected SFXFileReaderServiceImpl() {
|
||||
}
|
||||
|
@@ -8,9 +8,7 @@
|
||||
|
||||
/**
|
||||
* <p>SFX/OpenURL link server support.</p>
|
||||
*
|
||||
* @see org.dspace.app.webui.jsptag.SFXLinkTag
|
||||
* @see org.dspace.app.xmlui.aspect.artifactbrowser.ItemViewer
|
||||
*
|
||||
*/
|
||||
|
||||
package org.dspace.app.sfx;
|
||||
|
@@ -1,49 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa;
|
||||
|
||||
/**
|
||||
* POJO representation for a SHERPA journal
|
||||
*
|
||||
* @author Andrea Bollini
|
||||
*/
|
||||
public class SHERPAJournal {
|
||||
private String title;
|
||||
|
||||
private String issn;
|
||||
|
||||
private String zetopub;
|
||||
|
||||
private String romeopub;
|
||||
|
||||
public SHERPAJournal(String title, String issn, String zetopub,
|
||||
String romeopub) {
|
||||
super();
|
||||
this.title = title;
|
||||
this.issn = issn;
|
||||
this.zetopub = zetopub;
|
||||
this.romeopub = romeopub;
|
||||
}
|
||||
|
||||
public String getTitle() {
|
||||
return title;
|
||||
}
|
||||
|
||||
public String getIssn() {
|
||||
return issn;
|
||||
}
|
||||
|
||||
public String getZetopub() {
|
||||
return zetopub;
|
||||
}
|
||||
|
||||
public String getRomeopub() {
|
||||
return romeopub;
|
||||
}
|
||||
|
||||
}
|
@@ -1,163 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* POJO representation for a SHERPA Publisher record
|
||||
*
|
||||
* @author Andrea Bollini
|
||||
*/
|
||||
public class SHERPAPublisher {
|
||||
private String name;
|
||||
|
||||
private String alias;
|
||||
|
||||
private String homeurl;
|
||||
|
||||
private String prearchiving;
|
||||
|
||||
private List<String> prerestriction;
|
||||
|
||||
private String postarchiving;
|
||||
|
||||
private List<String> postrestriction;
|
||||
|
||||
private String pubarchiving;
|
||||
|
||||
private List<String> pubrestriction;
|
||||
|
||||
private List<String> condition;
|
||||
|
||||
private String paidaccessurl;
|
||||
|
||||
private String paidaccessname;
|
||||
|
||||
private String paidaccessnotes;
|
||||
|
||||
private List<String[]> copyright;
|
||||
|
||||
private String romeocolour;
|
||||
|
||||
private String dateadded;
|
||||
|
||||
private String dateupdated;
|
||||
|
||||
public SHERPAPublisher(String name, String alias, String homeurl,
|
||||
String prearchiving, List<String> prerestriction,
|
||||
String postarchiving, List<String> postrestriction,
|
||||
String pubarchiving, List<String> pubrestriction,
|
||||
List<String> condition, String paidaccessurl,
|
||||
String paidaccessname, String paidaccessnotes,
|
||||
List<String[]> copyright, String romeocolour, String datedded,
|
||||
String dateupdated) {
|
||||
this.name = name;
|
||||
|
||||
this.alias = alias;
|
||||
|
||||
this.homeurl = homeurl;
|
||||
|
||||
this.prearchiving = prearchiving;
|
||||
|
||||
this.prerestriction = prerestriction;
|
||||
|
||||
this.postarchiving = postarchiving;
|
||||
|
||||
this.postrestriction = postrestriction;
|
||||
|
||||
this.pubarchiving = pubarchiving;
|
||||
|
||||
this.pubrestriction = pubrestriction;
|
||||
|
||||
this.condition = condition;
|
||||
|
||||
this.paidaccessurl = paidaccessurl;
|
||||
|
||||
this.paidaccessname = paidaccessname;
|
||||
|
||||
this.paidaccessnotes = paidaccessnotes;
|
||||
|
||||
this.copyright = copyright;
|
||||
|
||||
this.romeocolour = romeocolour;
|
||||
|
||||
this.dateadded = datedded;
|
||||
|
||||
this.dateupdated = dateupdated;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public String getAlias() {
|
||||
return alias;
|
||||
}
|
||||
|
||||
public String getHomeurl() {
|
||||
return homeurl;
|
||||
}
|
||||
|
||||
public String getPrearchiving() {
|
||||
return prearchiving;
|
||||
}
|
||||
|
||||
public List<String> getPrerestriction() {
|
||||
return prerestriction;
|
||||
}
|
||||
|
||||
public String getPostarchiving() {
|
||||
return postarchiving;
|
||||
}
|
||||
|
||||
public List<String> getPostrestriction() {
|
||||
return postrestriction;
|
||||
}
|
||||
|
||||
public String getPubarchiving() {
|
||||
return pubarchiving;
|
||||
}
|
||||
|
||||
public List<String> getPubrestriction() {
|
||||
return pubrestriction;
|
||||
}
|
||||
|
||||
public List<String> getCondition() {
|
||||
return condition;
|
||||
}
|
||||
|
||||
public String getPaidaccessurl() {
|
||||
return paidaccessurl;
|
||||
}
|
||||
|
||||
public String getPaidaccessname() {
|
||||
return paidaccessname;
|
||||
}
|
||||
|
||||
public String getPaidaccessnotes() {
|
||||
return paidaccessnotes;
|
||||
}
|
||||
|
||||
public List<String[]> getCopyright() {
|
||||
return copyright;
|
||||
}
|
||||
|
||||
public String getRomeocolour() {
|
||||
return romeocolour;
|
||||
}
|
||||
|
||||
public String getDatedded() {
|
||||
return dateadded;
|
||||
}
|
||||
|
||||
public String getDateupdated() {
|
||||
return dateupdated;
|
||||
}
|
||||
|
||||
}
|
@@ -1,201 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.dspace.app.util.XMLUtils;
|
||||
import org.w3c.dom.Document;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
/**
|
||||
* JAVA representation for a SHERPA API Response
|
||||
*
|
||||
* @author Andrea Bollini
|
||||
*/
|
||||
public class SHERPAResponse {
|
||||
private boolean error;
|
||||
|
||||
private String message;
|
||||
|
||||
private String license;
|
||||
|
||||
private String licenseURL;
|
||||
|
||||
private String disclaimer;
|
||||
|
||||
private List<SHERPAJournal> journals;
|
||||
|
||||
private List<SHERPAPublisher> publishers;
|
||||
|
||||
public SHERPAResponse(InputStream xmlData) {
|
||||
try {
|
||||
DocumentBuilderFactory factory = DocumentBuilderFactory
|
||||
.newInstance();
|
||||
factory.setValidating(false);
|
||||
factory.setIgnoringComments(true);
|
||||
factory.setIgnoringElementContentWhitespace(true);
|
||||
|
||||
DocumentBuilder db = factory.newDocumentBuilder();
|
||||
Document inDoc = db.parse(xmlData);
|
||||
|
||||
Element xmlRoot = inDoc.getDocumentElement();
|
||||
Element headersElement = XMLUtils.getSingleElement(xmlRoot,
|
||||
"header");
|
||||
Element journalsElement = XMLUtils.getSingleElement(xmlRoot,
|
||||
"journals");
|
||||
Element publishersElement = XMLUtils.getSingleElement(xmlRoot,
|
||||
"publishers");
|
||||
|
||||
message = XMLUtils.getElementValue(headersElement, "message");
|
||||
|
||||
if (StringUtils.isNotBlank(message)) {
|
||||
error = true;
|
||||
return;
|
||||
}
|
||||
|
||||
license = XMLUtils.getElementValue(headersElement, "license");
|
||||
licenseURL = XMLUtils.getElementValue(headersElement, "licenseurl");
|
||||
disclaimer = XMLUtils.getElementValue(headersElement, "disclaimer");
|
||||
|
||||
List<Element> journalsList = XMLUtils.getElementList(
|
||||
journalsElement, "journal");
|
||||
List<Element> publishersList = XMLUtils.getElementList(
|
||||
publishersElement, "publisher");
|
||||
|
||||
if (journalsList != null) {
|
||||
journals = new LinkedList<SHERPAJournal>();
|
||||
for (Element journalElement : journalsList) {
|
||||
journals.add(new SHERPAJournal(
|
||||
XMLUtils.getElementValue(journalElement, "jtitle"),
|
||||
XMLUtils.getElementValue(journalElement, "issn"),
|
||||
XMLUtils.getElementValue(journalElement, "zetopub"),
|
||||
XMLUtils.getElementValue(journalElement, "romeopub")));
|
||||
}
|
||||
}
|
||||
|
||||
if (publishersList != null) {
|
||||
publishers = new LinkedList<SHERPAPublisher>();
|
||||
for (Element publisherElement : publishersList) {
|
||||
Element preprintsElement = XMLUtils.getSingleElement(
|
||||
publisherElement, "preprints");
|
||||
Element preprintsRestrictionElement = XMLUtils
|
||||
.getSingleElement(publisherElement,
|
||||
"prerestrictions");
|
||||
|
||||
Element postprintsElement = XMLUtils.getSingleElement(
|
||||
publisherElement, "postprints");
|
||||
Element postprintsRestrictionElement = XMLUtils
|
||||
.getSingleElement(publisherElement,
|
||||
"postrestrictions");
|
||||
|
||||
Element pdfversionElement = XMLUtils.getSingleElement(
|
||||
publisherElement, "pdfversion");
|
||||
Element pdfversionRestrictionElement = XMLUtils
|
||||
.getSingleElement(publisherElement,
|
||||
"pdfrestrictions");
|
||||
|
||||
Element conditionsElement = XMLUtils.getSingleElement(
|
||||
publisherElement, "conditions");
|
||||
Element paidaccessElement = XMLUtils.getSingleElement(
|
||||
publisherElement, "paidaccess");
|
||||
|
||||
Element copyrightlinksElement = XMLUtils.getSingleElement(
|
||||
publisherElement, "copyrightlinks");
|
||||
|
||||
publishers
|
||||
.add(new SHERPAPublisher(XMLUtils.getElementValue(
|
||||
publisherElement, "name"),
|
||||
XMLUtils.getElementValue(publisherElement,
|
||||
"alias"), XMLUtils.getElementValue(
|
||||
publisherElement, "homeurl"),
|
||||
|
||||
XMLUtils.getElementValue(preprintsElement,
|
||||
"prearchiving"),
|
||||
XMLUtils.getElementValueList(
|
||||
preprintsRestrictionElement,
|
||||
"prerestriction"),
|
||||
|
||||
XMLUtils.getElementValue(postprintsElement,
|
||||
"postarchiving"),
|
||||
XMLUtils.getElementValueList(
|
||||
postprintsRestrictionElement,
|
||||
"postrestriction"),
|
||||
|
||||
XMLUtils.getElementValue(pdfversionElement,
|
||||
"pdfarchiving"),
|
||||
XMLUtils.getElementValueList(
|
||||
pdfversionRestrictionElement,
|
||||
"pdfrestriction"),
|
||||
|
||||
XMLUtils
|
||||
.getElementValueList(
|
||||
conditionsElement,
|
||||
"condition"), XMLUtils
|
||||
.getElementValue(paidaccessElement,
|
||||
"paidaccessurl"), XMLUtils
|
||||
.getElementValue(paidaccessElement,
|
||||
"paidaccessname"), XMLUtils
|
||||
.getElementValue(paidaccessElement,
|
||||
"paidaccessnotes"),
|
||||
XMLUtils.getElementValueArrayList(
|
||||
copyrightlinksElement,
|
||||
"copyrightlink",
|
||||
"copyrightlinktext",
|
||||
"copyrightlinkurl"), XMLUtils
|
||||
.getElementValue(publisherElement,
|
||||
"romeocolour"), XMLUtils
|
||||
.getElementValue(publisherElement,
|
||||
"dateadded"), XMLUtils
|
||||
.getElementValue(publisherElement,
|
||||
"dateupdated")));
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
error = true;
|
||||
}
|
||||
}
|
||||
|
||||
public SHERPAResponse(String message) {
|
||||
this.message = message;
|
||||
this.error = true;
|
||||
}
|
||||
|
||||
public boolean isError() {
|
||||
return error;
|
||||
}
|
||||
|
||||
public String getMessage() {
|
||||
return message;
|
||||
}
|
||||
|
||||
public String getLicense() {
|
||||
return license;
|
||||
}
|
||||
|
||||
public String getLicenseURL() {
|
||||
return licenseURL;
|
||||
}
|
||||
|
||||
public String getDisclaimer() {
|
||||
return disclaimer;
|
||||
}
|
||||
|
||||
public List<SHERPAJournal> getJournals() {
|
||||
return journals;
|
||||
}
|
||||
|
||||
public List<SHERPAPublisher> getPublishers() {
|
||||
return publishers;
|
||||
}
|
||||
}
|
@@ -7,7 +7,15 @@
|
||||
*/
|
||||
package org.dspace.app.sherpa;
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import javax.annotation.PostConstruct;
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.http.HttpEntity;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.HttpStatus;
|
||||
@@ -16,21 +24,42 @@ import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.utils.URIBuilder;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.sherpa.v2.SHERPAPublisherResponse;
|
||||
import org.dspace.app.sherpa.v2.SHERPAResponse;
|
||||
import org.dspace.app.sherpa.v2.SHERPAUtils;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* SHERPAService is responsible for making the HTTP call to the SHERPA v2 API
|
||||
* for SHERPASubmitService.
|
||||
* Note, this service is ported from DSpace 6 for the ability to search policies by ISSN
|
||||
* There are also new DataProvider implementations provided for use as 'external sources'
|
||||
* of journal and publisher data
|
||||
* @see org.dspace.external.provider.impl.SHERPAv2JournalDataProvider
|
||||
* @see org.dspace.external.provider.impl.SHERPAv2PublisherDataProvider
|
||||
* @author Kim Shepherd
|
||||
*/
|
||||
public class SHERPAService {
|
||||
private CloseableHttpClient client = null;
|
||||
|
||||
private int maxNumberOfTries;
|
||||
private long sleepBetweenTimeouts;
|
||||
private int timeout = 5000;
|
||||
private String endpoint = null;
|
||||
private String apiKey = null;
|
||||
|
||||
/** log4j category */
|
||||
private static final Logger log = LogManager.getLogger(SHERPAService.class);
|
||||
|
||||
@Autowired
|
||||
ConfigurationService configurationService;
|
||||
|
||||
/**
|
||||
* log4j category
|
||||
* Create a new HTTP builder with sensible defaults in constructor
|
||||
*/
|
||||
private static final Logger log = Logger.getLogger(SHERPAService.class);
|
||||
|
||||
public SHERPAService() {
|
||||
HttpClientBuilder builder = HttpClientBuilder.create();
|
||||
// httpclient 4.3+ doesn't appear to have any sensible defaults any more. Setting conservative defaults as
|
||||
@@ -41,62 +70,117 @@ public class SHERPAService {
|
||||
.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Complete initialization of the Bean.
|
||||
*/
|
||||
@PostConstruct
|
||||
private void init() {
|
||||
// Get endpoint and API key from configuration
endpoint = configurationService.getProperty("sherpa.romeo.url",
|
||||
"https://v2.sherpa.ac.uk/cgi/retrieve");
|
||||
apiKey = configurationService.getProperty("sherpa.romeo.apikey");
|
||||
}
|
||||
|
||||
/**
|
||||
* Search the SHERPA v2 API for journal policy data using the supplied ISSN.
* If the API key is missing, or the HTTP response is non-OK or does not complete
|
||||
* successfully, a simple error response will be returned.
|
||||
* Otherwise, the response body will be passed to SHERPAResponse for parsing as JSON
|
||||
* and the final result returned to the calling method
|
||||
* @param query ISSN string to pass in an "issn equals" API query
|
||||
* @return SHERPAResponse containing an error or journal policies
|
||||
*/
|
||||
public SHERPAResponse searchByJournalISSN(String query) {
|
||||
String endpoint = ConfigurationManager.getProperty("sherpa.romeo.url");
|
||||
String apiKey = ConfigurationManager.getProperty("sherpa.romeo.apikey");
|
||||
return performRequest("publication", "issn", "equals", query, 0, 1);
|
||||
}
|
||||
|
||||
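A hypothetical caller of searchByJournalISSN above - the journal lookup used by SHERPASubmitService later in this diff. The bean wiring and ISSN value are illustrative, and getJournals() is assumed to mirror the v1 accessor of the same name:

    SHERPAResponse response = sherpaService.searchByJournalISSN("0140-6736");
    if (response.isError()) {
        // isError()/getMessage() are used the same way by SHERPASubmitService below
        System.err.println("SHERPA lookup failed: " + response.getMessage());
    } else {
        // Each journal result carries publisher and policy lists
        response.getJournals().forEach(j -> System.out.println(j.getTitles()));
    }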
/**
|
||||
* Perform an API request to the SHERPA v2 API - this could be a search or a get for any entity type
|
||||
* but the return object here must be a SHERPAPublisherResponse not the journal-centric SHERPAResponse
|
||||
* For more information about the type, field and predicate arguments, see the SHERPA v2 API documentation
|
||||
* @param type entity type eg "publisher"
|
||||
* @param field field eg "issn" or "title"
|
||||
* @param predicate predicate eg "equals" or "contains-word"
|
||||
* @param value the actual value to search for (eg an ISSN or partial title)
|
||||
* @param start start / offset of search results
|
||||
* @param limit maximum search results to return
|
||||
* @return SHERPAPublisherResponse object
|
||||
*/
|
||||
public SHERPAPublisherResponse performPublisherRequest(String type, String field, String predicate, String value,
|
||||
int start, int limit) {
|
||||
// API Key is *required* for v2 API calls
|
||||
if (null == apiKey) {
|
||||
log.error("SHERPA ROMeO API Key missing: please register for an API key and set sherpa.romeo.apikey");
|
||||
return new SHERPAPublisherResponse("SHERPA/RoMEO configuration invalid or missing");
|
||||
}
|
||||
|
||||
HttpGet method = null;
|
||||
SHERPAResponse sherpaResponse = null;
|
||||
SHERPAPublisherResponse sherpaResponse = null;
|
||||
int numberOfTries = 0;
|
||||
|
||||
while (numberOfTries < maxNumberOfTries && sherpaResponse == null) {
|
||||
numberOfTries++;
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug(String.format(
|
||||
"Trying to contact SHERPA/RoMEO - attempt %d of %d; timeout is %d; sleep between timeouts is %d",
|
||||
numberOfTries,
|
||||
maxNumberOfTries,
|
||||
timeout,
|
||||
sleepBetweenTimeouts));
|
||||
}
|
||||
log.debug(String.format(
|
||||
"Trying to contact SHERPA/RoMEO - attempt %d of %d; timeout is %d; sleep between timeouts is %d",
|
||||
numberOfTries,
|
||||
maxNumberOfTries,
|
||||
timeout,
|
||||
sleepBetweenTimeouts));
|
||||
|
||||
try {
|
||||
Thread.sleep(sleepBetweenTimeouts);
|
||||
|
||||
URIBuilder uriBuilder = new URIBuilder(endpoint);
|
||||
uriBuilder.addParameter("issn", query);
|
||||
uriBuilder.addParameter("versions", "all");
|
||||
if (StringUtils.isNotBlank(apiKey)) {
|
||||
uriBuilder.addParameter("ak", apiKey);
|
||||
}
|
||||
|
||||
method = new HttpGet(uriBuilder.build());
|
||||
method.setConfig(RequestConfig.custom()
|
||||
.setConnectionRequestTimeout(timeout)
|
||||
.setConnectTimeout(timeout)
|
||||
.setSocketTimeout(timeout)
|
||||
.build());
|
||||
// Execute the method.
|
||||
// Construct a default HTTP method (first result)
|
||||
method = constructHttpGet(type, field, predicate, value, start, limit);
|
||||
|
||||
// Execute the method
|
||||
HttpResponse response = client.execute(method);
|
||||
int statusCode = response.getStatusLine().getStatusCode();
|
||||
|
||||
log.debug(response.getStatusLine().getStatusCode() + ": "
|
||||
+ response.getStatusLine().getReasonPhrase());
|
||||
|
||||
if (statusCode != HttpStatus.SC_OK) {
|
||||
sherpaResponse = new SHERPAResponse("SHERPA/RoMEO return not OK status: "
|
||||
+ statusCode);
|
||||
sherpaResponse = new SHERPAPublisherResponse("SHERPA/RoMEO return not OK status: "
|
||||
+ statusCode);
|
||||
String errorBody = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
|
||||
log.error("Error from SHERPA HTTP request: " + errorBody);
|
||||
}
|
||||
|
||||
HttpEntity responseBody = response.getEntity();
|
||||
|
||||
// If the response body is valid, pass to SHERPAResponse for parsing as JSON
|
||||
if (null != responseBody) {
|
||||
sherpaResponse = new SHERPAResponse(responseBody.getContent());
|
||||
log.debug("Non-null SHERPA resonse received for query of " + value);
|
||||
InputStream content = null;
|
||||
try {
|
||||
content = responseBody.getContent();
|
||||
sherpaResponse =
|
||||
new SHERPAPublisherResponse(content, SHERPAPublisherResponse.SHERPAFormat.JSON);
|
||||
} catch (IOException e) {
|
||||
log.error("Encountered exception while contacting SHERPA/RoMEO: " + e.getMessage(), e);
|
||||
} finally {
|
||||
if (content != null) {
|
||||
content.close();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
sherpaResponse = new SHERPAResponse("SHERPA/RoMEO returned no response");
|
||||
log.debug("Empty SHERPA response body for query on " + value);
|
||||
sherpaResponse = new SHERPAPublisherResponse("SHERPA/RoMEO returned no response");
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.warn("Encountered exception while contacting SHERPA/RoMEO: " + e.getMessage(), e);
|
||||
} catch (URISyntaxException e) {
|
||||
String errorMessage = "Error building SHERPA v2 API URI: " + e.getMessage();
|
||||
log.error(errorMessage, e);
|
||||
sherpaResponse = new SHERPAPublisherResponse(errorMessage);
|
||||
} catch (IOException e) {
|
||||
String errorMessage = "Encountered exception while contacting SHERPA/RoMEO: " + e.getMessage();
|
||||
log.error(errorMessage, e);
|
||||
sherpaResponse = new SHERPAPublisherResponse(errorMessage);
|
||||
} catch (InterruptedException e) {
|
||||
String errorMessage = "Encountered exception while sleeping thread: " + e.getMessage();
|
||||
log.error(errorMessage, e);
|
||||
sherpaResponse = new SHERPAPublisherResponse(errorMessage);
|
||||
} finally {
|
||||
if (method != null) {
|
||||
method.releaseConnection();
|
||||
@@ -105,13 +189,218 @@ public class SHERPAService {
|
||||
}
|
||||
|
||||
if (sherpaResponse == null) {
|
||||
log.debug("SHERPA response is still null");
|
||||
sherpaResponse = new SHERPAPublisherResponse(
|
||||
"Error processing the SHERPA/RoMEO answer");
|
||||
}
|
||||
|
||||
// Return the final response
|
||||
return sherpaResponse;
|
||||
}
|
||||
|
||||
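The retry loop above is controlled by the maxNumberOfTries and sleepBetweenTimeouts bean properties. A hypothetical programmatic wiring sketch - DSpace normally configures this bean in Spring XML, and the sleepBetweenTimeouts setter is assumed to exist alongside setMaxNumberOfTries, which appears later in this diff:

    SHERPAService service = new SHERPAService();
    service.setMaxNumberOfTries(3);        // give up after three attempts
    service.setSleepBetweenTimeouts(2000); // assumed setter: wait 2s between attempts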
/**
|
||||
* Perform an API request to the SHERPA v2 API - this could be a search or a get for any entity type
|
||||
* For more information about the type, field and predicate arguments, see the SHERPA v2 API documentation
|
||||
* @param type entity type eg "publication" or "publisher"
|
||||
* @param field field eg "issn" or "title"
|
||||
* @param predicate predicate eg "equals" or "contains-word"
|
||||
* @param value the actual value to search for (eg an ISSN or partial title)
|
||||
* @param start start / offset of search results
|
||||
* @param limit maximum search results to return
|
||||
* @return SHERPAResponse object
|
||||
*/
|
||||
public SHERPAResponse performRequest(String type, String field, String predicate, String value,
|
||||
int start, int limit) {
|
||||
// API Key is *required* for v2 API calls
|
||||
if (null == apiKey) {
|
||||
log.error("SHERPA ROMeO API Key missing: please register for an API key and set sherpa.romeo.apikey");
|
||||
return new SHERPAResponse("SHERPA/RoMEO configuration invalid or missing");
|
||||
}
|
||||
|
||||
HttpGet method = null;
|
||||
SHERPAResponse sherpaResponse = null;
|
||||
int numberOfTries = 0;
|
||||
|
||||
while (numberOfTries < maxNumberOfTries && sherpaResponse == null) {
|
||||
numberOfTries++;
|
||||
|
||||
log.debug(String.format(
|
||||
"Trying to contact SHERPA/RoMEO - attempt %d of %d; timeout is %d; sleep between timeouts is %d",
|
||||
numberOfTries,
|
||||
maxNumberOfTries,
|
||||
timeout,
|
||||
sleepBetweenTimeouts));
|
||||
|
||||
try {
|
||||
Thread.sleep(sleepBetweenTimeouts);
|
||||
|
||||
// Construct a default HTTP method (first result)
|
||||
method = constructHttpGet(type, field, predicate, value, start, limit);
|
||||
|
||||
// Execute the method
|
||||
HttpResponse response = client.execute(method);
|
||||
int statusCode = response.getStatusLine().getStatusCode();
|
||||
|
||||
log.debug(response.getStatusLine().getStatusCode() + ": "
|
||||
+ response.getStatusLine().getReasonPhrase());
|
||||
|
||||
if (statusCode != HttpStatus.SC_OK) {
|
||||
sherpaResponse = new SHERPAResponse("SHERPA/RoMEO return not OK status: "
|
||||
+ statusCode);
|
||||
String errorBody = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
|
||||
log.error("Error from SHERPA HTTP request: " + errorBody);
|
||||
}
|
||||
|
||||
HttpEntity responseBody = response.getEntity();
|
||||
|
||||
// If the response body is valid, pass to SHERPAResponse for parsing as JSON
|
||||
if (null != responseBody) {
|
||||
log.debug("Non-null SHERPA resonse received for query of " + value);
|
||||
InputStream content = null;
|
||||
try {
|
||||
content = responseBody.getContent();
|
||||
sherpaResponse = new SHERPAResponse(content, SHERPAResponse.SHERPAFormat.JSON);
|
||||
} catch (IOException e) {
|
||||
log.error("Encountered exception while contacting SHERPA/RoMEO: " + e.getMessage(), e);
|
||||
} finally {
|
||||
if (content != null) {
|
||||
content.close();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log.debug("Empty SHERPA response body for query on " + value);
|
||||
sherpaResponse = new SHERPAResponse("SHERPA/RoMEO returned no response");
|
||||
}
|
||||
} catch (URISyntaxException e) {
|
||||
String errorMessage = "Error building SHERPA v2 API URI: " + e.getMessage();
|
||||
log.error(errorMessage, e);
|
||||
sherpaResponse = new SHERPAResponse(errorMessage);
|
||||
} catch (IOException e) {
|
||||
String errorMessage = "Encountered exception while contacting SHERPA/RoMEO: " + e.getMessage();
|
||||
log.error(errorMessage, e);
|
||||
sherpaResponse = new SHERPAResponse(errorMessage);
|
||||
} catch (InterruptedException e) {
|
||||
String errorMessage = "Encountered exception while sleeping thread: " + e.getMessage();
|
||||
log.error(errorMessage, e);
|
||||
sherpaResponse = new SHERPAResponse(errorMessage);
|
||||
} finally {
|
||||
if (method != null) {
|
||||
method.releaseConnection();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (sherpaResponse == null) {
|
||||
log.debug("SHERPA response is still null");
|
||||
sherpaResponse = new SHERPAResponse(
|
||||
"Error processing the SHERPA/RoMEO answer");
|
||||
}
|
||||
|
||||
// Return the final response
|
||||
return sherpaResponse;
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct HTTP GET object for a "field,predicate,value" query with default start, limit
|
||||
* eg. "title","contains-word","Lancet" or "issn","equals","1234-1234"
|
||||
* @param field the field (issn, title, etc)
|
||||
* @param predicate the predicate (contains-word, equals, etc - see API docs)
|
||||
* @param value the query value itself
|
||||
* @return HttpGet method which can then be executed by the client
|
||||
* @throws URISyntaxException if the URL build fails
|
||||
*/
|
||||
public HttpGet constructHttpGet(String type, String field, String predicate, String value)
|
||||
throws URISyntaxException {
|
||||
return constructHttpGet(type, field, predicate, value, 0, 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct HTTP GET object for a "field,predicate,value" query
|
||||
* eg. "title","contains-word","Lancet" or "issn","equals","1234-1234"
|
||||
* @param field the field (issn, title, etc)
|
||||
* @param predicate the predicate (contains-word, equals, etc - see API docs)
|
||||
* @param value the query value itself
|
||||
* @param start row offset
|
||||
* @param limit number of results to return
|
||||
* @return HttpGet object to be executed by the client
|
||||
* @throws URISyntaxException if the URL build fails
*/
|
||||
public HttpGet constructHttpGet(String type, String field, String predicate, String value, int start, int limit)
|
||||
throws URISyntaxException {
|
||||
// Sanitise query string (strip some characters) field, predicate and value
|
||||
if (null == type) {
|
||||
type = "publication";
|
||||
}
|
||||
field = SHERPAUtils.sanitiseQuery(field);
|
||||
predicate = SHERPAUtils.sanitiseQuery(predicate);
|
||||
value = SHERPAUtils.sanitiseQuery(value);
|
||||
type = SHERPAUtils.sanitiseQuery(type);
|
||||
|
||||
// Build URL based on search query
|
||||
URIBuilder uriBuilder = new URIBuilder(endpoint);
|
||||
uriBuilder.addParameter("item-type", type);
|
||||
uriBuilder.addParameter("filter", "[[\"" + field + "\",\"" + predicate + "\",\"" + value + "\"]]");
|
||||
uriBuilder.addParameter("format", "Json");
|
||||
// Set optional start (offset) and limit parameters
|
||||
if (start >= 0) {
|
||||
uriBuilder.addParameter("offset", String.valueOf(start));
|
||||
}
|
||||
if (limit > 0) {
|
||||
uriBuilder.addParameter("limit", String.valueOf(limit));
|
||||
}
|
||||
if (StringUtils.isNotBlank(apiKey)) {
|
||||
uriBuilder.addParameter("api-key", apiKey);
|
||||
}
|
||||
|
||||
log.debug("SHERPA API URL: " + uriBuilder.toString());
|
||||
|
||||
// Create HTTP GET object
|
||||
HttpGet method = new HttpGet(uriBuilder.build());
|
||||
|
||||
// Set connection parameters
|
||||
int timeout = 5000;
|
||||
method.setConfig(RequestConfig.custom()
|
||||
.setConnectionRequestTimeout(timeout)
|
||||
.setConnectTimeout(timeout)
|
||||
.setSocketTimeout(timeout)
|
||||
.build());
|
||||
|
||||
return method;
|
||||
}
|
||||
|
||||
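For reference, the filter parameter assembled above is a JSON array of [field, predicate, value] triples. A request built by constructHttpGet therefore serialises roughly as follows (the ISSN and API key are illustrative, and the filter value is shown unencoded for readability):

    https://v2.sherpa.ac.uk/cgi/retrieve?item-type=publication
        &filter=[["issn","equals","0140-6736"]]&format=Json
        &offset=0&limit=1&api-key=YOUR-KEY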
/**
|
||||
* Prepare the API query for execution by the HTTP client
|
||||
* @param query ISSN query string
|
||||
* @param endpoint API endpoint (base URL)
|
||||
* @param apiKey API key parameter
|
||||
* @return URI object
|
||||
* @throws URISyntaxException if the URL build fails
*/
|
||||
public URI prepareQuery(String query, String endpoint, String apiKey) throws URISyntaxException {
|
||||
// Sanitise query string
|
||||
query = SHERPAUtils.sanitiseQuery(query);
|
||||
|
||||
// Instantiate URI builder
|
||||
URIBuilder uriBuilder = new URIBuilder(endpoint);
|
||||
|
||||
// Build URI parameters from supplied values
|
||||
uriBuilder.addParameter("item-type", "publication");
|
||||
|
||||
// Log warning if no query is supplied
|
||||
if (null == query) {
|
||||
log.warn("No ISSN supplied as query string for SHERPA service search");
|
||||
}
|
||||
uriBuilder.addParameter("filter", "[[\"issn\",\"equals\",\"" + query + "\"]]");
|
||||
uriBuilder.addParameter("format", "Json");
|
||||
if (StringUtils.isNotBlank(apiKey)) {
|
||||
uriBuilder.addParameter("api-key", apiKey);
|
||||
}
|
||||
log.debug("Would search SHERPA endpoint with " + uriBuilder.toString());
|
||||
|
||||
// Return final built URI
|
||||
return uriBuilder.build();
|
||||
}
|
||||
|
||||
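A hypothetical direct use of the prepareQuery helper above (endpoint and key values illustrative):

    URI uri = sherpaService.prepareQuery("0140-6736",
            "https://v2.sherpa.ac.uk/cgi/retrieve", "YOUR-KEY");
    HttpGet get = new HttpGet(uri);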
public void setMaxNumberOfTries(int maxNumberOfTries) {
|
||||
this.maxNumberOfTries = maxNumberOfTries;
|
||||
}
|
||||
|
@@ -7,49 +7,111 @@
|
||||
*/
|
||||
package org.dspace.app.sherpa.submit;
|
||||
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.app.sherpa.SHERPAResponse;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.sherpa.SHERPAService;
|
||||
import org.dspace.app.sherpa.v2.SHERPAResponse;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.LogManager;
|
||||
|
||||
/**
|
||||
* SHERPASubmitService coordinates SHERPA v2 API lookups for items under submission,
* using SHERPAService to retrieve deposit policies for each ISSN found in an item.
* @see SHERPAService
* @author Kim Shepherd
|
||||
*/
|
||||
public class SHERPASubmitService {
|
||||
private SHERPAService sherpaService;
|
||||
|
||||
private SHERPASubmitConfigurationService configuration;
|
||||
/**
|
||||
* Spring beans for configuration and API service
|
||||
*/
|
||||
protected SHERPAService sherpaService;
|
||||
protected SHERPASubmitConfigurationService configuration;
|
||||
|
||||
/**
|
||||
* log4j logger
|
||||
*/
|
||||
private static Logger log = Logger.getLogger(SHERPASubmitService.class);
|
||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(SHERPASubmitService.class);
|
||||
|
||||
/**
|
||||
* Setter for configuration (from Spring)
|
||||
* @see "dspace-dspace-addon-sherpa-configuration-services.xml"
|
||||
* @param configuration
|
||||
*/
|
||||
public void setConfiguration(SHERPASubmitConfigurationService configuration) {
|
||||
this.configuration = configuration;
|
||||
}
|
||||
|
||||
/**
|
||||
* Setter for SHERPA service, responsible for actual HTTP API calls
* @see "dspace-dspace-addon-sherpa-configuration-services.xml"
|
||||
* @param sherpaService
|
||||
*/
|
||||
public void setSherpaService(SHERPAService sherpaService) {
|
||||
this.sherpaService = sherpaService;
|
||||
}
|
||||
|
||||
public SHERPAResponse searchRelatedJournals(Context context, Item item) {
|
||||
/**
|
||||
* Search SHERPA for journal policies matching the ISSNs in the item.
|
||||
* Rather than a 'search' query for any/all ISSNs, the v2 API requires a separate
|
||||
* query for each ISSN found in the item. The ISSNs are extracted using the configured
|
||||
* issnItemExtractor(s) in the SHERPA spring configuration.
|
||||
* The ISSNs are not validated with a regular expression or other rules - any values
|
||||
* extracted will be included in API queries.
|
||||
* @see "dspace-dspace-addon-sherpa-configuration-services.xml"
|
||||
* @param context DSpace context
|
||||
* @param item DSpace item containing ISSNs to be checked
|
||||
* @return SHERPA v2 API response (policy data)
|
||||
*/
|
||||
public List<SHERPAResponse> searchRelatedJournals(Context context, Item item) {
|
||||
Set<String> issns = getISSNs(context, item);
|
||||
if (issns == null || issns.size() == 0) {
|
||||
return null;
|
||||
} else {
|
||||
return sherpaService.searchByJournalISSN(StringUtils.join(issns, ","));
|
||||
// SHERPA v2 API no longer supports "OR'd" ISSN search, perform individual searches instead
|
||||
Iterator<String> issnIterator = issns.iterator();
|
||||
List<SHERPAResponse> responses = new LinkedList<>();
|
||||
while (issnIterator.hasNext()) {
|
||||
String issn = issnIterator.next();
|
||||
SHERPAResponse response = sherpaService.searchByJournalISSN(issn);
|
||||
if (response.isError()) {
|
||||
// Continue with loop
|
||||
log.warn("Failed to look up SHERPA ROMeO result for ISSN: " + issn
|
||||
+ ": " + response.getMessage());
|
||||
}
|
||||
// Store this response, even if it has an error (useful for UI reporting)
|
||||
responses.add(response);
|
||||
}
|
||||
if (responses.isEmpty()) {
|
||||
responses.add(new SHERPAResponse("SHERPA ROMeO lookup failed"));
|
||||
}
|
||||
return responses;
|
||||
}
|
||||
}
|
||||
|
||||
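A hedged sketch of consuming the per-ISSN results returned above (the context, item, and service wiring are assumed; accessor names are as used elsewhere in this diff):

    List<SHERPAResponse> responses = sherpaSubmitService.searchRelatedJournals(context, item);
    if (responses != null) {
        for (SHERPAResponse r : responses) {
            if (r.isError()) {
                // Error responses are kept in the list so the UI can report them per ISSN
                continue;
            }
            // ... render journal / publisher policy data from r ...
        }
    }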
/**
|
||||
* Search SHERPA for journal policies matching the passed ISSN.
|
||||
* The ISSN is not validated with a regular expression or other rules - any String
* passed to this method will be considered an ISSN for the purposes of an API query
|
||||
* @param issn ISSN string
|
||||
* @return SHERPA v2 API response object (policy data)
|
||||
*/
|
||||
public SHERPAResponse searchRelatedJournalsByISSN(String issn) {
|
||||
return sherpaService.searchByJournalISSN(issn);
|
||||
}
|
||||
|
||||
/**
|
||||
* Using the configured itemIssnExtractors from SHERPA configuration, extract
|
||||
* ISSNs from item metadata or authority values
|
||||
* @param context DSpace context
|
||||
* @param item Item containing metadata / authority values
|
||||
* @return Set of ISSN strings
|
||||
*/
|
||||
public Set<String> getISSNs(Context context, Item item) {
|
||||
Set<String> issns = new LinkedHashSet<String>();
|
||||
if (configuration.getIssnItemExtractors() == null) {
|
||||
@@ -68,6 +130,13 @@ public class SHERPASubmitService {
|
||||
return issns;
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple boolean test that runs the getISSNs extraction method
|
||||
* to determine whether an item has any ISSNs at all
|
||||
* @param context DSpace context
|
||||
* @param item Item to test
|
||||
* @return boolean indicating presence of >=1 ISSNs
|
||||
*/
|
||||
public boolean hasISSNs(Context context, Item item) {
|
||||
Set<String> issns = getISSNs(context, item);
|
||||
if (issns == null || issns.size() == 0) {
|
||||
|
@@ -0,0 +1,111 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Plain java representation of a SHERPA Journal object, based on SHERPA API v2 responses.
|
||||
*
|
||||
* In a SHERPA search for journal deposit policies, this is generally structured
|
||||
* as a list in the SHERPAResponse object.
|
||||
* Each journal contains a list of publisher data and list of publishing policies as well as basic metadata
|
||||
* about the journal such as ISSNs, titles, whether it appears in DOAJ, primary publisher, etc.
|
||||
* @see SHERPAResponse
|
||||
* @see org.dspace.external.provider.impl.SHERPAv2JournalDataProvider
|
||||
*
|
||||
* @author Kim Shepherd
|
||||
*/
|
||||
public class SHERPAJournal {
|
||||
|
||||
private List<String> titles;
|
||||
private String url;
|
||||
private List<String> issns;
|
||||
private String romeoPub;
|
||||
private String zetoPub;
|
||||
private SHERPAPublisher publisher;
|
||||
private List<SHERPAPublisher> publishers;
|
||||
private List<SHERPAPublisherPolicy> policies;
|
||||
private Boolean inDOAJ;
|
||||
|
||||
public SHERPAJournal() {
|
||||
|
||||
}
|
||||
|
||||
public List<String> getTitles() {
|
||||
return titles;
|
||||
}
|
||||
|
||||
public void setTitles(List<String> titles) {
|
||||
this.titles = titles;
|
||||
}
|
||||
|
||||
public String getUrl() {
|
||||
return url;
|
||||
}
|
||||
|
||||
public void setUrl(String url) {
|
||||
this.url = url;
|
||||
}
|
||||
|
||||
public List<String> getIssns() {
|
||||
return issns;
|
||||
}
|
||||
|
||||
public void setIssns(List<String> issns) {
|
||||
this.issns = issns;
|
||||
}
|
||||
|
||||
public String getRomeoPub() {
|
||||
return romeoPub;
|
||||
}
|
||||
|
||||
public void setRomeoPub(String romeoPub) {
|
||||
this.romeoPub = romeoPub;
|
||||
}
|
||||
|
||||
public String getZetoPub() {
|
||||
return zetoPub;
|
||||
}
|
||||
|
||||
public void setZetoPub(String zetoPub) {
|
||||
this.zetoPub = zetoPub;
|
||||
}
|
||||
|
||||
public SHERPAPublisher getPublisher() {
|
||||
return publisher;
|
||||
}
|
||||
|
||||
public void setPublisher(SHERPAPublisher publisher) {
|
||||
this.publisher = publisher;
|
||||
}
|
||||
|
||||
public List<SHERPAPublisher> getPublishers() {
|
||||
return publishers;
|
||||
}
|
||||
|
||||
public void setPublishers(List<SHERPAPublisher> publishers) {
|
||||
this.publishers = publishers;
|
||||
}
|
||||
|
||||
public List<SHERPAPublisherPolicy> getPolicies() {
|
||||
return policies;
|
||||
}
|
||||
|
||||
public void setPolicies(List<SHERPAPublisherPolicy> policies) {
|
||||
this.policies = policies;
|
||||
}
|
||||
|
||||
public Boolean getInDOAJ() {
|
||||
return inDOAJ;
|
||||
}
|
||||
|
||||
public void setInDOAJ(Boolean inDOAJ) {
|
||||
this.inDOAJ = inDOAJ;
|
||||
}
|
||||
}
|
@@ -0,0 +1,108 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Plain java representation of a SHERPA Permitted Version object, based on SHERPA API v2 responses.
|
||||
*
|
||||
* In a SHERPA search for journal deposit policies, this data is contained within a publisher policy.
|
||||
* Each permitted version is for a particular article version (eg. submitted, accepted, published) and contains:
*
* - A list of general conditions / terms for deposit of this version of the work
* - A list of allowed locations (eg. institutional repository, personal homepage, non-commercial repository)
* - A list of prerequisite conditions for deposit (eg. attribution, linking to published version)
* - A list of required licences for the deposited work (eg. CC-BY-NC)
* - Embargo requirements, if any
*
|
||||
* This class also has some helper data for labels, which can be used with i18n when displaying policy information
|
||||
*
|
||||
* @see SHERPAPublisherPolicy
|
||||
*/
|
||||
public class SHERPAPermittedVersion {
|
||||
|
||||
// Version (submitted, accepted, published)
|
||||
private String articleVersion;
|
||||
|
||||
// Option number
|
||||
private int option;
|
||||
|
||||
// General conditions
|
||||
private List<String> conditions;
|
||||
// Prerequisites (eg. if required by funder)
|
||||
private List<String> prerequisites;
|
||||
// Allowed locations
|
||||
private List<String> locations;
|
||||
// Required license(s)
|
||||
private List<String> licenses;
|
||||
// Embargo
|
||||
private SHERPAEmbargo embargo;
|
||||
|
||||
protected class SHERPAEmbargo {
|
||||
String units;
|
||||
int amount;
|
||||
}
|
||||
|
||||
public String getArticleVersion() {
|
||||
return articleVersion;
|
||||
}
|
||||
|
||||
public void setArticleVersion(String articleVersion) {
|
||||
this.articleVersion = articleVersion;
|
||||
}
|
||||
|
||||
public List<String> getConditions() {
|
||||
return conditions;
|
||||
}
|
||||
|
||||
public void setConditions(List<String> conditions) {
|
||||
this.conditions = conditions;
|
||||
}
|
||||
|
||||
public List<String> getPrerequisites() {
|
||||
return prerequisites;
|
||||
}
|
||||
|
||||
public void setPrerequisites(List<String> prerequisites) {
|
||||
this.prerequisites = prerequisites;
|
||||
}
|
||||
|
||||
public List<String> getLocations() {
|
||||
return locations;
|
||||
}
|
||||
|
||||
public void setLocations(List<String> locations) {
|
||||
this.locations = locations;
|
||||
}
|
||||
|
||||
public List<String> getLicenses() {
|
||||
return licenses;
|
||||
}
|
||||
|
||||
public void setLicenses(List<String> licenses) {
|
||||
this.licenses = licenses;
|
||||
}
|
||||
|
||||
public SHERPAEmbargo getEmbargo() {
|
||||
return embargo;
|
||||
}
|
||||
|
||||
public void setEmbargo(SHERPAEmbargo embargo) {
|
||||
this.embargo = embargo;
|
||||
}
|
||||
|
||||
public int getOption() {
|
||||
return option;
|
||||
}
|
||||
|
||||
public void setOption(int option) {
|
||||
this.option = option;
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,100 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
/**
|
||||
* Plain java representation of a SHERPA Publisher object, based on SHERPA API v2 responses.
|
||||
*
|
||||
* In a search for SHERPA journal deposit policy, this publisher object will appear in a list of publishers
|
||||
* from the journal object, and as a single publisher member for the primary/current publisher of the journal.
|
||||
* In a search for SHERPA publisher information, this object will appear in a list of publishers from the main
|
||||
* SHERPA Publisher Response object
|
||||
*
|
||||
* @see SHERPAJournal
|
||||
* @see SHERPAPublisherResponse
|
||||
*/
|
||||
public class SHERPAPublisher {
|
||||
private String name = null;
|
||||
private String relationshipType;
|
||||
private String country;
|
||||
private String uri = null;
|
||||
private String identifier = null;
|
||||
private int publicationCount;
|
||||
|
||||
// This is not technically in the same place in the SHERPA data model, but it makes more sense to apply it
// here, as it is treated as a 'special case' - just for printing links to paid OA access policies
private String paidAccessDescription;
|
||||
private String paidAccessUrl;
|
||||
|
||||
public SHERPAPublisher() {
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getRelationshipType() {
|
||||
return relationshipType;
|
||||
}
|
||||
|
||||
public void setRelationshipType(String relationshipType) {
|
||||
this.relationshipType = relationshipType;
|
||||
}
|
||||
|
||||
public String getCountry() {
|
||||
return country;
|
||||
}
|
||||
|
||||
public void setCountry(String country) {
|
||||
this.country = country;
|
||||
}
|
||||
|
||||
public String getUri() {
|
||||
return uri;
|
||||
}
|
||||
|
||||
public void setUri(String uri) {
|
||||
this.uri = uri;
|
||||
}
|
||||
|
||||
public int getPublicationCount() {
|
||||
return publicationCount;
|
||||
}
|
||||
|
||||
public void setPublicationCount(int publicationCount) {
|
||||
this.publicationCount = publicationCount;
|
||||
}
|
||||
|
||||
public String getPaidAccessDescription() {
|
||||
return paidAccessDescription;
|
||||
}
|
||||
|
||||
public void setPaidAccessDescription(String paidAccessDescription) {
|
||||
this.paidAccessDescription = paidAccessDescription;
|
||||
}
|
||||
|
||||
public String getPaidAccessUrl() {
|
||||
return paidAccessUrl;
|
||||
}
|
||||
|
||||
public void setPaidAccessUrl(String paidAccessUrl) {
|
||||
this.paidAccessUrl = paidAccessUrl;
|
||||
}
|
||||
|
||||
public String getIdentifier() {
|
||||
return identifier;
|
||||
}
|
||||
|
||||
public void setIdentifier(String identifier) {
|
||||
this.identifier = identifier;
|
||||
}
|
||||
}
|
@@ -0,0 +1,128 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Plain java representation of a SHERPA Publisher Policy object, based on SHERPA API v2 responses.
|
||||
*
|
||||
* In a SHERPA search for deposit policies, each journal contains one or more publisher policies
|
||||
* Each publisher policy contains a list of different article versions (eg. submitted, accepted, published)
* which hold the data about what can be done with each version.
|
||||
* This class also holds copyright URLs and other policy URLs, as well as some helper information for display
|
||||
* of overall policies in UI (as per legacy SHERPA data)
|
||||
*
|
||||
* @see SHERPAJournal
|
||||
* @see SHERPAPermittedVersion
|
||||
*/
|
||||
public class SHERPAPublisherPolicy {
|
||||
|
||||
private int id;
|
||||
private boolean openAccessPermitted;
|
||||
private String uri;
|
||||
private String internalMoniker;
|
||||
private List<SHERPAPermittedVersion> permittedVersions;
|
||||
private Map<String, String> urls;
|
||||
private boolean openAccessProhibited;
|
||||
private int publicationCount;
|
||||
|
||||
// The legacy "can" / "cannot" indicators
|
||||
private String preArchiving = "cannot";
|
||||
private String postArchiving = "cannot";
|
||||
private String pubArchiving = "cannot";
|
||||
|
||||
public int getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(int id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public boolean isOpenAccessPermitted() {
|
||||
return openAccessPermitted;
|
||||
}
|
||||
|
||||
public void setOpenAccessPermitted(boolean openAccessPermitted) {
|
||||
this.openAccessPermitted = openAccessPermitted;
|
||||
}
|
||||
|
||||
public String getUri() {
|
||||
return uri;
|
||||
}
|
||||
|
||||
public void setUri(String uri) {
|
||||
this.uri = uri;
|
||||
}
|
||||
|
||||
public String getInternalMoniker() {
|
||||
return internalMoniker;
|
||||
}
|
||||
|
||||
public void setInternalMoniker(String internalMoniker) {
|
||||
this.internalMoniker = internalMoniker;
|
||||
}
|
||||
|
||||
public List<SHERPAPermittedVersion> getPermittedVersions() {
|
||||
return permittedVersions;
|
||||
}
|
||||
|
||||
public void setPermittedVersions(List<SHERPAPermittedVersion> permittedVersions) {
|
||||
this.permittedVersions = permittedVersions;
|
||||
}
|
||||
|
||||
public Map<String, String> getUrls() {
|
||||
return urls;
|
||||
}
|
||||
|
||||
public void setUrls(Map<String, String> urls) {
|
||||
this.urls = urls;
|
||||
}
|
||||
|
||||
public boolean isOpenAccessProhibited() {
|
||||
return openAccessProhibited;
|
||||
}
|
||||
|
||||
public void setOpenAccessProhibited(boolean openAccessProhibited) {
|
||||
this.openAccessProhibited = openAccessProhibited;
|
||||
}
|
||||
|
||||
public int getPublicationCount() {
|
||||
return publicationCount;
|
||||
}
|
||||
|
||||
public void setPublicationCount(int publicationCount) {
|
||||
this.publicationCount = publicationCount;
|
||||
}
|
||||
|
||||
public String getPreArchiving() {
|
||||
return preArchiving;
|
||||
}
|
||||
|
||||
public void setPreArchiving(String preArchiving) {
|
||||
this.preArchiving = preArchiving;
|
||||
}
|
||||
|
||||
public String getPostArchiving() {
|
||||
return postArchiving;
|
||||
}
|
||||
|
||||
public void setPostArchiving(String postArchiving) {
|
||||
this.postArchiving = postArchiving;
|
||||
}
|
||||
|
||||
public String getPubArchiving() {
|
||||
return pubArchiving;
|
||||
}
|
||||
|
||||
public void setPubArchiving(String pubArchiving) {
|
||||
this.pubArchiving = pubArchiving;
|
||||
}
|
||||
}
|
@@ -0,0 +1,224 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.json.JSONArray;
|
||||
import org.json.JSONException;
|
||||
import org.json.JSONObject;
|
||||
import org.json.JSONTokener;
|
||||
|
||||
/**
|
||||
* Model class for the SHERPAv2 API (JSON) response for a publisher search
|
||||
* The structure and approach used are quite different from the simple v1 API used previously
*
|
||||
* @see SHERPAPublisher
|
||||
*
|
||||
* @author Kim Shepherd
|
||||
*
|
||||
*/
|
||||
public class SHERPAPublisherResponse {
|
||||
// Is this response to be treated as an error?
|
||||
private boolean error;
|
||||
|
||||
// Error message
|
||||
private String message;
|
||||
|
||||
// Parsed system metadata from search results
|
||||
private SHERPASystemMetadata metadata;
|
||||
|
||||
// List of parsed publisher results
|
||||
private List<SHERPAPublisher> publishers;
|
||||
|
||||
// Internal Sherpa ID
|
||||
private int id;
|
||||
|
||||
// SHERPA URI (the human page version of this API response)
|
||||
private String uri;
|
||||
|
||||
// Format enum - currently only JSON is supported
|
||||
public enum SHERPAFormat {
|
||||
JSON, XML
|
||||
};
|
||||
|
||||
private static Logger log = LogManager.getLogger();
|
||||
|
||||
/**
|
||||
* Parse SHERPA v2 API for a given format
|
||||
* @param input - input stream from the HTTP response content
|
||||
* @param format - requested format
|
||||
* @throws IOException
|
||||
*/
|
||||
public SHERPAPublisherResponse(InputStream input, SHERPAFormat format) throws IOException {
|
||||
if (format == SHERPAFormat.JSON) {
|
||||
parseJSON(input);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the SHERPA v2 API JSON and construct simple list of publisher objects
|
||||
* This method does not return a value, but rather populates the metadata and publishers objects
|
||||
* with data parsed from the JSON.
|
||||
* @param jsonData - the JSON input stream from the API result response body
|
||||
*/
|
||||
private void parseJSON(InputStream jsonData) throws IOException {
|
||||
InputStreamReader streamReader = new InputStreamReader(jsonData);
|
||||
JSONTokener jsonTokener = new JSONTokener(streamReader);
|
||||
JSONObject httpResponse;
|
||||
try {
|
||||
httpResponse = new JSONObject(jsonTokener);
|
||||
if (httpResponse.has("items")) {
|
||||
JSONArray items = httpResponse.getJSONArray("items");
|
||||
|
||||
// The items array in this context holds publisher results - parsing is simpler than
// parsing the full journal / policy responses
if (items.length() > 0) {
|
||||
metadata = new SHERPASystemMetadata();
|
||||
this.publishers = new LinkedList<>();
|
||||
// Iterate search result items
|
||||
for (int itemIndex = 0; itemIndex < items.length(); itemIndex++) {
|
||||
SHERPAPublisher sherpaPublisher = new SHERPAPublisher();
|
||||
|
||||
JSONObject item = items.getJSONObject(itemIndex);
|
||||
|
||||
// Parse system metadata (per-item / result information)
|
||||
if (item.has("system_metadata")) {
|
||||
JSONObject systemMetadata = item.getJSONObject("system_metadata");
|
||||
metadata = parseSystemMetadata(systemMetadata);
|
||||
if (metadata.getId() >= 0) {
|
||||
// Set publisher identifier to be the internal SHERPA ID
|
||||
// eg. '30' (Elsevier)
|
||||
sherpaPublisher.setIdentifier(String.valueOf(metadata.getId()));
|
||||
}
|
||||
}
|
||||
|
||||
// Set publisher name
|
||||
sherpaPublisher.setName(parsePublisherName(item));
|
||||
|
||||
// Set publisher URL
|
||||
sherpaPublisher.setUri(parsePublisherURL(item));
|
||||
|
||||
this.publishers.add(sherpaPublisher);
|
||||
}
|
||||
|
||||
} else {
|
||||
error = true;
|
||||
message = "No results found";
|
||||
}
|
||||
} else {
|
||||
error = true;
|
||||
message = "No results found";
|
||||
}
|
||||
|
||||
} catch (JSONException e) {
|
||||
log.error("Failed to parse SHERPA response", e);
|
||||
error = true;
|
||||
} finally {
|
||||
streamReader.close();
|
||||
}
|
||||
}
|
||||
|
||||
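For orientation, the publisher-search payload parsed above has roughly this shape. This is a hypothetical, heavily trimmed example: only the field names read by parseJSON and parseSystemMetadata are taken from the code, all values are invented:

    {
      "items": [
        {
          "system_metadata": {
            "id": 30,
            "uri": "https://v2.sherpa.ac.uk/id/publisher/30",
            "date_created": "2020-01-01 12:00:00",
            "date_modified": "2020-06-01 12:00:00"
          },
          "name": [ { "name": "Example Publisher" } ],
          "url": "https://publisher.example.org/"
        }
      ]
    }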
/**
|
||||
* Parse system metadata and return populated SHERPASystemMetadata object
|
||||
* @param systemMetadata
|
||||
*/
|
||||
private SHERPASystemMetadata parseSystemMetadata(JSONObject systemMetadata) {
|
||||
|
||||
SHERPASystemMetadata metadata = new SHERPASystemMetadata();
|
||||
|
||||
if (systemMetadata.has("uri")) {
|
||||
this.uri = systemMetadata.getString("uri");
|
||||
metadata.setUri(this.uri);
|
||||
} else {
|
||||
log.error("SHERPA URI missing for API response item");
|
||||
}
|
||||
if (systemMetadata.has("id")) {
|
||||
this.id = systemMetadata.getInt("id");
|
||||
metadata.setId(this.id);
|
||||
} else {
|
||||
log.error("SHERPA internal ID missing for API response item");
|
||||
}
|
||||
// Get date created and date modified - DSpace expects these in the publisher object
if (systemMetadata.has("date_created")) {
|
||||
metadata.setDateCreated(systemMetadata.getString("date_created"));
|
||||
}
|
||||
if (systemMetadata.has("date_modified")) {
|
||||
metadata.setDateModified(systemMetadata.getString("date_modified"));
|
||||
}
|
||||
|
||||
return metadata;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the publisher's name array and return the first name string found
* @param publisher - publisher JSON object containing a "name" array
* @return first publisher name found (trimmed String)
|
||||
*/
|
||||
private String parsePublisherName(JSONObject publisher) {
|
||||
String name = null;
|
||||
if (publisher.has("name")) {
|
||||
JSONArray publisherNames = publisher.getJSONArray("name");
|
||||
if (publisherNames.length() > 0) {
|
||||
JSONObject publisherName = publisherNames.getJSONObject(0);
|
||||
if (publisherName.has("name")) {
|
||||
name = publisherName.getString("name").trim();
|
||||
}
|
||||
}
|
||||
}
|
||||
return name;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Parse publisher URL from the json data
|
||||
* @param publisher - publisher object (from JSON array)
|
||||
* @return publisher URL as string
|
||||
*/
|
||||
private String parsePublisherURL(JSONObject publisher) {
|
||||
if (publisher.has("url")) {
|
||||
return publisher.getString("url");
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new response object to be handled as an error
|
||||
* @param message - the message to render in logs or error pages
|
||||
*/
|
||||
public SHERPAPublisherResponse(String message) {
|
||||
this.message = message;
|
||||
this.error = true;
|
||||
}
|
||||
|
||||
public boolean isError() {
|
||||
return error;
|
||||
}
|
||||
|
||||
public String getMessage() {
|
||||
return message;
|
||||
}
|
||||
|
||||
public void setMessage(String message) {
|
||||
this.message = message;
|
||||
}
|
||||
|
||||
public SHERPASystemMetadata getMetadata() {
|
||||
return metadata;
|
||||
}
|
||||
|
||||
public List<SHERPAPublisher> getPublishers() {
|
||||
return publishers;
|
||||
}
|
||||
}
|
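A rough usage sketch for the publisher parser above, assuming SHERPAPublisherResponse exposes the same (InputStream, SHERPAFormat) constructor and SHERPAFormat enum as SHERPAResponse below; the JSON literal is a hypothetical, trimmed-down v2 publisher-search payload, not a captured SHERPA response:

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.dspace.app.sherpa.v2.SHERPAPublisherResponse;

public class SHERPAPublisherResponseSketch {
    public static void main(String[] args) throws IOException {
        // Hypothetical, trimmed-down v2 publisher search payload
        String json = "{\"items\": [{"
            + "\"system_metadata\": {\"id\": 30, \"uri\": \"https://v2.sherpa.ac.uk/id/publisher/30\"},"
            + "\"name\": [{\"name\": \"Elsevier\"}],"
            + "\"url\": \"https://www.elsevier.com\"}]}";
        SHERPAPublisherResponse response = new SHERPAPublisherResponse(
            new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)),
            SHERPAPublisherResponse.SHERPAFormat.JSON);
        if (!response.isError()) {
            // Prints "Elsevier": the name is read from the first entry of the "name" array,
            // and the identifier "30" is taken from system_metadata.id
            System.out.println(response.getPublishers().get(0).getName());
        }
    }
}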
@@ -0,0 +1,546 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.sherpa.v2;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONTokener;

/**
 * Model class for the SHERPAv2 API (JSON) response for a publication (journal) search
 * The structure and approach used is quite different to the simple v1 API used previously
 * The structure is based on journal data, which in turn contains data about publishers and policies
 *
 * @see SHERPAJournal
 *
 * @author Kim Shepherd
 *
 */
public class SHERPAResponse {
    // Is this response to be treated as an error?
    private boolean error;

    // Error message
    private String message;

    // Parsed system metadata from search results
    private SHERPASystemMetadata metadata;

    // List of parsed journal results
    private List<SHERPAJournal> journals;

    // Internal Sherpa ID
    private int id;

    // SHERPA URI (the human page version of this API response)
    private String uri;

    // Format enum - currently only JSON is supported
    public enum SHERPAFormat {
        JSON, XML
    };

    private static Logger log = LogManager.getLogger();

    /**
     * Parse SHERPA v2 API for a given format
     * @param input - input stream from the HTTP response content
     * @param format - requested format
     * @throws IOException
     */
    public SHERPAResponse(InputStream input, SHERPAFormat format) throws IOException {
        if (format == SHERPAFormat.JSON) {
            parseJSON(input);
        }
    }

    /**
     * Parse the SHERPA v2 API JSON and construct Romeo policy data for display
     * This method does not return a value, but rather populates the metadata and journals objects
     * with data parsed from the JSON.
     * @param jsonData - the JSON input stream from the API result response body
     */
    private void parseJSON(InputStream jsonData) throws IOException {
        InputStreamReader streamReader = new InputStreamReader(jsonData);
        JSONTokener jsonTokener = new JSONTokener(streamReader);
        JSONObject httpResponse;
        try {
            httpResponse = new JSONObject(jsonTokener);
            if (httpResponse.has("items")) {
                JSONArray items = httpResponse.getJSONArray("items");

                // items array is search results, *not* journals or publishers - they are listed for each item
                // - however, we only ever want one result since we're passing an "equals ISSN" query
                if (items.length() > 0) {
                    metadata = new SHERPASystemMetadata();
                    this.journals = new LinkedList<>();
                    // Iterate search result items
                    for (int itemIndex = 0; itemIndex < items.length(); itemIndex++) {
                        List<SHERPAPublisher> sherpaPublishers = new LinkedList<>();
                        List<SHERPAPublisherPolicy> policies = new ArrayList<>();
                        SHERPAPublisher sherpaPublisher = new SHERPAPublisher();
                        SHERPAJournal sherpaJournal = new SHERPAJournal();

                        JSONObject item = items.getJSONObject(itemIndex);

                        // Parse system metadata (per-item / result information)
                        if (item.has("system_metadata")) {
                            JSONObject systemMetadata = item.getJSONObject("system_metadata");
                            metadata = parseSystemMetadata(systemMetadata);
                        }

                        // Parse "publisher policy"
                        // note - most of the information that was previously under 'publisher' is now under here
                        if (item.has("publisher_policy")) {

                            // Parse main publisher policies node
                            JSONArray publisherPolicies = item.getJSONArray("publisher_policy");
                            for (int i = 0; i < publisherPolicies.length(); i++) {

                                JSONObject policy = publisherPolicies.getJSONObject(i);

                                // Special case - quickly check the policy for the 'paid access' option
                                // and continue if found, then parse the rest of the policy
                                String moniker = null;
                                if (policy.has("internal_moniker")) {
                                    moniker = policy.getString("internal_moniker");
                                }
                                // This seems to be usually policy(ies) for the journal proper
                                // and then an "Open access option" which contains some of the info
                                // that the 'paidaccess' node in the old API used to contain
                                // Look for: internal_moniker = "Open access option"
                                // Check if this is OA options (Paid Access) or not
                                if ("Open access option".equalsIgnoreCase(moniker)) {
                                    log.debug("This is the Open access options policy - a special case");
                                    if (policy.has("urls")) {
                                        JSONArray urls = policy.getJSONArray("urls");
                                        for (int u = 0; u < urls.length(); u++) {
                                            JSONObject url = urls.getJSONObject(u);
                                            if (url.has("description") &&
                                                "Open Access".equalsIgnoreCase(url.getString("description"))) {
                                                log.debug("Found OA paid access url: " + url.getString("url"));
                                                sherpaPublisher.setPaidAccessDescription(url.getString("description"));
                                                sherpaPublisher.setPaidAccessUrl(url.getString("url"));
                                                break;
                                            }
                                        }
                                    }
                                    // Continue the loop here - this "policy" is a bit different and we
                                    // don't want to add irrelevant conditions to the policy
                                    continue;
                                }

                                // Parse the main publisher policy object and add to the list
                                SHERPAPublisherPolicy sherpaPublisherPolicy = parsePublisherPolicy(policy);
                                policies.add(sherpaPublisherPolicy);
                            }

                            // set publisher name - note we're only looking for the first name here
                            // as per previous functionality (for simple display)
                            if (item.has("publishers")) {
                                JSONArray publishers = item.getJSONArray("publishers");
                                if (publishers.length() > 0) {
                                    JSONObject publisherElement = publishers.getJSONObject(0);
                                    if (publisherElement.has("publisher")) {
                                        JSONObject publisher = publisherElement.getJSONObject("publisher");
                                        sherpaPublisher.setName(parsePublisherName(publisher));
                                        sherpaPublisher.setUri(parsePublisherURL(publisher));
                                    }
                                }
                            }

                            // Parse journal data
                            sherpaJournal = parseJournal(item, sherpaPublisher.getName());
                        }

                        sherpaPublishers.add(sherpaPublisher);
                        sherpaJournal.setPublisher(sherpaPublisher);
                        sherpaJournal.setPublishers(sherpaPublishers);
                        sherpaJournal.setPolicies(policies);
                        this.journals.add(sherpaJournal);
                    }

                } else {
                    error = true;
                    message = "No results found";
                }
            } else {
                error = true;
                message = "No results found";
            }

        } catch (JSONException e) {
            log.error("Failed to parse SHERPA response", e);
            error = true;
        } finally {
            streamReader.close();
        }
    }

    /**
     * Parse system metadata and return populated SHERPASystemMetadata object
     * @param systemMetadata - the system_metadata JSON node from the API response item
     * @return populated SHERPASystemMetadata object
     */
    private SHERPASystemMetadata parseSystemMetadata(JSONObject systemMetadata) {

        SHERPASystemMetadata metadata = new SHERPASystemMetadata();

        if (systemMetadata.has("uri")) {
            this.uri = systemMetadata.getString("uri");
            metadata.setUri(this.uri);
        } else {
            log.error("SHERPA URI missing for API response item");
        }
        if (systemMetadata.has("id")) {
            this.id = systemMetadata.getInt("id");
            metadata.setId(this.id);
        } else {
            log.error("SHERPA internal ID missing for API response item");
        }
        // Get date created and added - DSpace expects this in the publisher object, though
        if (systemMetadata.has("date_created")) {
            metadata.setDateCreated(systemMetadata.getString("date_created"));
        }
        if (systemMetadata.has("date_modified")) {
            metadata.setDateModified(systemMetadata.getString("date_modified"));
        }
        // Is this item publicly visible?
        if (systemMetadata.has("publicly_visible")) {
            metadata.setPubliclyVisible("yes".equals(systemMetadata
                .getString("publicly_visible")));
        }
        // Is this item listed in the DOAJ?
        if (systemMetadata.has("listed_in_doaj")) {
            metadata.setInDOAJ("yes".equals(systemMetadata
                .getString("listed_in_doaj")));
        }

        return metadata;
    }

    /**
     * Parse journal JSON data and return populated bean
     * This method also takes publisherName as a string to help construct some
     * legacy labels
     * @param item - the main result item JSON (which is the closest thing to an actual 'journal')
     * @param publisherName - the parsed publisher name
     * @return populated SHERPAJournal object
     */
    private SHERPAJournal parseJournal(JSONObject item, String publisherName) {

        SHERPAJournal sherpaJournal = new SHERPAJournal();

        // set journal title
        if (item.has("title")) {
            JSONArray titles = item.getJSONArray("title");
            if (titles.length() > 0) {
                List<String> titleList = new ArrayList<>();
                for (int t = 0; t < titles.length(); t++) {
                    JSONObject title = titles.getJSONObject(t);
                    if (title.has("title")) {
                        titleList.add(title.getString("title").trim());
                    }
                }
                sherpaJournal.setTitles(titleList);
                if (titleList.size() > 0) {
                    // Faking this a bit based on what I'd seen - not in the API v2 data
                    sherpaJournal.setRomeoPub(publisherName + ": "
                        + titleList.get(0));
                    sherpaJournal.setZetoPub(publisherName + ": "
                        + titleList.get(0));
                    log.debug("Found journal title: " + titleList.get(0));
                }
            }
        }

        // Journal URL
        if (item.has("url")) {
            sherpaJournal.setUrl(item.getString("url"));
        }

        // set ISSNs
        if (item.has("issns")) {
            JSONArray issns = item.getJSONArray("issns");
            // just get first - DSpace data model only allows for one
            List<String> issnList = new ArrayList<>();
            for (int ii = 0; ii < issns.length(); ii++) {
                JSONObject issn = issns.getJSONObject(ii);
                issnList.add(issn.getString("issn").trim());
            }
            sherpaJournal.setIssns(issnList);
        }

        // Is the item in DOAJ?
        if (item.has("listed_in_doaj")) {
            sherpaJournal.setInDOAJ(("yes".equals(item.getString("listed_in_doaj"))));
        }

        return sherpaJournal;
    }

    /**
     * Parse a publisher_policy JSON data and return a populated bean
     * @param policy - each publisher policy node in the JSON array
     * @return populated SHERPAPublisherPolicy object
     */
    private SHERPAPublisherPolicy parsePublisherPolicy(JSONObject policy) {

        SHERPAPublisherPolicy sherpaPublisherPolicy = new SHERPAPublisherPolicy();

        // Get and set monikers
        String moniker = null;
        if (policy.has("internal_moniker")) {
            moniker = policy.getString("internal_moniker");
            sherpaPublisherPolicy.setInternalMoniker(moniker);
        }

        // URLs (used to be Copyright Links)
        if (policy.has("urls")) {
            JSONArray urls = policy.getJSONArray("urls");
            Map<String, String> copyrightLinks = new TreeMap<>();
            for (int u = 0; u < urls.length(); u++) {
                JSONObject url = urls.getJSONObject(u);
                if (url.has("description") && url.has("url")) {
                    log.debug("Setting copyright URL: " + url.getString("url"));
                    copyrightLinks.put(url.getString("url"), url.getString("description"));
                }
            }
            sherpaPublisherPolicy.setUrls(copyrightLinks);
        }

        // Permitted OA options
        int submittedOption = 0;
        int acceptedOption = 0;
        int publishedOption = 0;
        int currentOption = 0;
        if (policy.has("permitted_oa")) {
            List<String> allowed = new ArrayList<>();
            JSONArray permittedOA = policy.getJSONArray("permitted_oa");
            List<SHERPAPermittedVersion> permittedVersions = new ArrayList<>();

            // Iterate each permitted OA version / option. The permitted_oa node is also known as a 'pathway' --
            // essentially "a way to get a work into a repository". Each pathway could refer to one article version
            // like a pre-print, or multiple versions might have the same acceptable locations and conditions.
            // As described below, where multiple versions are referenced in a single permitted_oa pathway, they will
            // be split out and treated separately. This keeps processing simple, especially later in display or
            // compliance checking when it is preferred to group / indicate rules by the article version
            for (int p = 0; p < permittedOA.length(); p++) {
                JSONObject permitted = permittedOA.getJSONObject(p);
                // Although it adds redundancy, we will treat each 'article version' within
                // the permitted_oa ("pathway") node as a separate version altogether to keep the rest of our display
                // handled nicely. This was confirmed as an appropriate approach by JISC
                if (permitted.has("article_version")) {
                    JSONArray versions = permitted.getJSONArray("article_version");
                    for (int v = 0; v < versions.length(); v++) {
                        // Parse this permitted_oa node but specifically looking for the article_version 'v'
                        SHERPAPermittedVersion permittedVersion = parsePermittedVersion(permitted, v);

                        // To determine which option # we are, inspect article versions and set
                        allowed.add(permittedVersion.getArticleVersion());
                        if ("submitted".equals(permittedVersion.getArticleVersion())) {
                            submittedOption++;
                            currentOption = submittedOption;
                        } else if ("accepted".equals(permittedVersion.getArticleVersion())) {
                            acceptedOption++;
                            currentOption = acceptedOption;
                        } else if ("published".equals(permittedVersion.getArticleVersion())) {
                            publishedOption++;
                            currentOption = publishedOption;
                        }
                        permittedVersion.setOption(currentOption);
                        permittedVersions.add(permittedVersion);
                    }
                }

                // Populate the old indicators into the publisher policy object
                if (allowed.contains("submitted")) {
                    sherpaPublisherPolicy.setPreArchiving("can");
                }
                if (allowed.contains("accepted")) {
                    sherpaPublisherPolicy.setPostArchiving("can");
                }
                if (allowed.contains("published")) {
                    sherpaPublisherPolicy.setPubArchiving("can");
                }

            }
            sherpaPublisherPolicy.setPermittedVersions(permittedVersions);
        }

        return sherpaPublisherPolicy;
    }

    /**
     * Parse permitted version JSON and populate new bean from the data
     * @param permitted - each 'permitted_oa' node in the JSON array
     * @param index - index of the article_version entry within this node to extract
     * @return populated SHERPAPermittedVersion object
     */
    private SHERPAPermittedVersion parsePermittedVersion(JSONObject permitted, int index) {

        SHERPAPermittedVersion permittedVersion = new SHERPAPermittedVersion();

        // Get the article version, which is ultimately used for the ticks / crosses
        // in the UI display. My assumptions around translation:
        // submitted = preprint
        // accepted = postprint
        // published = pdfversion
        // These strings can be used to construct i18n messages.
        String articleVersion = "unknown";
        String versionLabel = "Unknown";

        // Each 'permitted OA' can actually refer to multiple versions
        if (permitted.has("article_version")) {
            JSONArray versions = permitted.getJSONArray("article_version");

            // Get one particular article version to return as a PermittedVersion. The outer loop calling this
            // is iterating all permitted_oa and permitted_oa->article_version array members
            articleVersion = versions.getString(index);
            permittedVersion.setArticleVersion(articleVersion);
            log.debug("Added allowed version: " + articleVersion + " to list");
        }

        // These are now child arrays, in old API they were explicit like
        // "preprint restrictions", etc., and just contained text rather than data
        if (permitted.has("conditions")) {
            List<String> conditionList = new ArrayList<>();
            JSONArray conditions = permitted.getJSONArray("conditions");
            for (int c = 0; c < conditions.length(); c++) {
                conditionList.add(conditions.getString(c).trim());
            }
            permittedVersion.setConditions(conditionList);
        }

        // Any prerequisites for this option (eg required by funder)
        List<String> prerequisites = new ArrayList<>();
        if (permitted.has("prerequisites")) {
            JSONObject prereqs = permitted.getJSONObject("prerequisites");
            if (prereqs.has("prerequisites_phrases")) {
                JSONArray phrases = prereqs.getJSONArray("prerequisites_phrases");
                for (int pp = 0; pp < phrases.length(); pp++) {
                    JSONObject phrase = phrases.getJSONObject(pp);
                    if (phrase.has("phrase")) {
                        prerequisites.add(phrase.getString("phrase").trim());
                    }
                }
            }
        }
        permittedVersion.setPrerequisites(prerequisites);

        // Locations where this version / option may be archived
        List<String> sherpaLocations = new ArrayList<>();
        if (permitted.has("location")) {
            JSONObject locations = permitted.getJSONObject("location");
            if (locations.has("location_phrases")) {
                JSONArray locationPhrases = locations.getJSONArray("location_phrases");
                if (locationPhrases.length() > 0) {
                    for (int l = 0; l < locationPhrases.length(); l++) {
                        JSONObject locationPhrase = locationPhrases.getJSONObject(l);
                        if (locationPhrase.has("phrase")) {
                            sherpaLocations.add(locationPhrase.getString("phrase").trim());
                        }
                    }
                }
            }
        }
        permittedVersion.setLocations(sherpaLocations);

        List<String> sherpaLicenses = new ArrayList<>();
        // required licences
        if (permitted.has("license")) {
            JSONArray licences = permitted.getJSONArray("license");
            for (int l = 0; l < licences.length(); l++) {
                JSONObject licence = licences.getJSONObject(l);
                if (licence.has("license_phrases")) {
                    JSONArray phrases = licence.getJSONArray("license_phrases");
                    for (int ll = 0; ll < phrases.length(); ll++) {
                        JSONObject phrase = phrases.getJSONObject(ll);
                        if (phrase.has("phrase")) {
                            sherpaLicenses.add(phrase.getString("phrase").trim());
                        }
                    }
                }
            }
        }
        permittedVersion.setLicenses(sherpaLicenses);

        return permittedVersion;
    }

    /**
     * Parse publisher array and return the first name string found
     * @param publisher - array of publisher JSON data
     * @return first publisher name found (trimmed String)
     */
    private String parsePublisherName(JSONObject publisher) {
        String name = null;
        if (publisher.has("name")) {
            JSONArray publisherNames = publisher.getJSONArray("name");
            if (publisherNames.length() > 0) {
                JSONObject publisherName = publisherNames.getJSONObject(0);
                if (publisherName.has("name")) {
                    name = publisherName.getString("name").trim();
                }
            }
        }
        return name;
    }

    /**
     * Parse publisher URL from the json data
     * @param publisher - publisher object (from JSON array)
     * @return publisher URL as string
     */
    private String parsePublisherURL(JSONObject publisher) {
        if (publisher.has("url")) {
            return publisher.getString("url");
        }
        return null;
    }

    /**
     * Create new response object to be handled as an error
     * @param message - the message to render in logs or error pages
     */
    public SHERPAResponse(String message) {
        this.message = message;
        this.error = true;
    }

    public boolean isError() {
        return error;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public List<SHERPAJournal> getJournals() {
        return journals;
    }

    public SHERPASystemMetadata getMetadata() {
        return metadata;
    }
}
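To make the pathway-splitting rule in parsePublisherPolicy concrete, a rough sketch using a hypothetical payload; the getPolicies and getPermittedVersions accessors are assumed to mirror the setters used above:

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;

import org.dspace.app.sherpa.v2.SHERPAPermittedVersion;
import org.dspace.app.sherpa.v2.SHERPAResponse;

public class SHERPAPathwaySplitSketch {
    public static void main(String[] args) throws IOException {
        // Hypothetical journal payload: one permitted_oa pathway naming two article versions
        String json = "{\"items\": [{"
            + "\"title\": [{\"title\": \"Hypothetical Journal\"}],"
            + "\"publisher_policy\": [{"
            + "\"internal_moniker\": \"Default\","
            + "\"permitted_oa\": [{"
            + "\"article_version\": [\"submitted\", \"accepted\"],"
            + "\"conditions\": [\"Must link to publisher version\"]}]}]}]}";
        SHERPAResponse response = new SHERPAResponse(
            new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)),
            SHERPAResponse.SHERPAFormat.JSON);
        // The single pathway is split into two SHERPAPermittedVersion beans,
        // one for "submitted" and one for "accepted", each carrying the same conditions
        List<SHERPAPermittedVersion> versions = response.getJournals().get(0)
            .getPolicies().get(0).getPermittedVersions();
        System.out.println(versions.size()); // 2
    }
}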
@@ -0,0 +1,80 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.sherpa.v2;

/**
 * Plain java representation of a SHERPA System Metadata object, based on SHERPA API v2 responses.
 *
 * This data is included in both journal deposit policy and publisher searches and contains basic metadata
 * about the SHERPA record and API response, eg. creation and modification dates, internal IDs, permissions, etc.
 *
 * @see SHERPAResponse
 * @see SHERPAPublisherResponse
 *
 * @author Kim Shepherd
 */
public class SHERPASystemMetadata {

    private int id;
    private String uri;
    private String dateCreated;
    private String dateModified;
    private boolean isPubliclyVisible = false;
    private boolean inDOAJ = false;

    public SHERPASystemMetadata() {
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getUri() {
        return uri;
    }

    public void setUri(String uri) {
        this.uri = uri;
    }

    public String getDateCreated() {
        return dateCreated;
    }

    public void setDateCreated(String dateCreated) {
        this.dateCreated = dateCreated;
    }

    public String getDateModified() {
        return dateModified;
    }

    public void setDateModified(String dateModified) {
        this.dateModified = dateModified;
    }

    public boolean isPubliclyVisible() {
        return isPubliclyVisible;
    }

    public void setPubliclyVisible(boolean publiclyVisible) {
        isPubliclyVisible = publiclyVisible;
    }

    public boolean isInDOAJ() {
        return inDOAJ;
    }

    public void setInDOAJ(boolean inDOAJ) {
        this.inDOAJ = inDOAJ;
    }
}
@@ -0,0 +1,38 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.sherpa.v2;

/**
 * SHERPA v2 API query handling utility methods (static). Used by external data providers and SHERPA service.
 *
 * @author Kim Shepherd
 */
public final class SHERPAUtils {

    // Private constructor (since this is a Utility class)
    private SHERPAUtils() {}

    /**
     * Sanitise a SHERPA v2 API query for some special JSON characters to help with parsing at remote end
     * Strip all these characters: "'{};
     * The URI builder used in the provider and service classes will perform URL encoding. This string
     * is the raw query submitted to the provider or service.
     * @param query query string
     * @return safe query string
     */
    public static String sanitiseQuery(String query) {
        String safe = query;
        try {
            safe = query.replaceAll("['{}\";]", "");
        } catch (NullPointerException e) {
            safe = "";
        }
        return safe;
    }

}
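A quick sketch of the sanitisation behaviour described above (expected output worked out by hand from the regex):

import org.dspace.app.sherpa.v2.SHERPAUtils;

public class SHERPAUtilsSketch {
    public static void main(String[] args) {
        // Single/double quotes, semicolons and braces are stripped; a null query
        // is caught by the NullPointerException handler and becomes ""
        String safe = SHERPAUtils.sanitiseQuery("issn: \"1234-5678\"; {drop}");
        System.out.println(safe); // issn: 1234-5678 drop
    }
}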
@@ -23,13 +23,15 @@ import java.util.List;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
@@ -39,6 +41,11 @@ import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.DiscoverResult;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.SearchUtils;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;

@@ -52,7 +59,7 @@ public class GenerateSitemaps {
    /**
     * Logger
     */
    private static Logger log = Logger.getLogger(GenerateSitemaps.class);
    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(GenerateSitemaps.class);

    private static final CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
    private static final CollectionService collectionService =
@@ -60,6 +67,7 @@ public class GenerateSitemaps {
    private static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
    private static final ConfigurationService configurationService =
        DSpaceServicesFactory.getInstance().getConfigurationService();
    private static final SearchService searchService = SearchUtils.getSearchService();

    /**
     * Default constructor
@@ -69,7 +77,7 @@ public class GenerateSitemaps {
    public static void main(String[] args) throws Exception {
        final String usage = GenerateSitemaps.class.getCanonicalName();

        CommandLineParser parser = new PosixParser();
        CommandLineParser parser = new DefaultParser();
        HelpFormatter hf = new HelpFormatter();

        Options options = new Options();
@@ -84,6 +92,9 @@ public class GenerateSitemaps {
        options
            .addOption("p", "ping", true,
                       "ping specified search engine URL");
        options
            .addOption("d", "delete", false,
                       "delete sitemaps dir and its contents");

        CommandLine line = null;

@@ -105,10 +116,9 @@ public class GenerateSitemaps {
        }

        /*
         * Sanity check -- if no sitemap generation or pinging to do, print
         * usage
         * Sanity check -- if no sitemap generation or pinging to do, or deletion, print usage
         */
        if (line.getArgs().length != 0 || line.hasOption('b')
        if (line.getArgs().length != 0 || line.hasOption('d') || line.hasOption('b')
            && line.hasOption('s') && !line.hasOption('g')
            && !line.hasOption('m') && !line.hasOption('y')
            && !line.hasOption('p')) {
@@ -123,6 +133,10 @@ public class GenerateSitemaps {
            generateSitemaps(!line.hasOption('b'), !line.hasOption('s'));
        }

        if (line.hasOption('d')) {
            deleteSitemaps();
        }

        if (line.hasOption('a')) {
            pingConfiguredSearchEngines();
        }
@@ -140,6 +154,29 @@ public class GenerateSitemaps {
        System.exit(0);
    }

    /**
     * Runs generate-sitemaps without any params for the scheduler (task-scheduler.xml).
     *
     * @throws SQLException if a database error occurs.
     * @throws IOException if IO error occurs.
     */
    public static void generateSitemapsScheduled() throws IOException, SQLException {
        generateSitemaps(true, true);
    }

    /**
     * Delete the sitemaps directory and its contents if it exists
     * @throws IOException if IO error occurs
     */
    public static void deleteSitemaps() throws IOException {
        File outputDir = new File(configurationService.getProperty("sitemap.dir"));
        if (!outputDir.exists() || !outputDir.isDirectory()) {
            log.error("Unable to delete sitemaps directory, doesn't exist or isn't a directory");
        } else {
            FileUtils.deleteDirectory(outputDir);
        }
    }

    /**
     * Generate sitemap.org protocol and/or basic HTML sitemaps.
     *
@@ -150,14 +187,9 @@ public class GenerateSitemaps {
     * @throws IOException if IO error
     *                     if IO error occurs.
     */
    public static void generateSitemaps(boolean makeHTMLMap,
                                        boolean makeSitemapOrg) throws SQLException, IOException {
        String sitemapStem = configurationService.getProperty("dspace.url")
            + "/sitemap";
        String htmlMapStem = configurationService.getProperty("dspace.url")
            + "/htmlmap";
        String handleURLStem = configurationService.getProperty("dspace.url")
            + "/handle/";
    public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) throws SQLException, IOException {
        String uiURLStem = configurationService.getProperty("dspace.ui.url");
        String sitemapStem = uiURLStem + "/sitemap";

        File outputDir = new File(configurationService.getProperty("sitemap.dir"));
        if (!outputDir.exists() && !outputDir.mkdir()) {
@@ -168,13 +200,11 @@ public class GenerateSitemaps {
        AbstractGenerator sitemapsOrg = null;

        if (makeHTMLMap) {
            html = new HTMLSitemapGenerator(outputDir, htmlMapStem + "?map=",
                                            null);
            html = new HTMLSitemapGenerator(outputDir, sitemapStem, ".html");
        }

        if (makeSitemapOrg) {
            sitemapsOrg = new SitemapsOrgGenerator(outputDir, sitemapStem
                + "?map=", null);
            sitemapsOrg = new SitemapsOrgGenerator(outputDir, sitemapStem, ".xml");
        }

        Context c = new Context(Context.Mode.READ_ONLY);

@@ -182,7 +212,7 @@ public class GenerateSitemaps {
        List<Community> comms = communityService.findAll(c);

        for (Community comm : comms) {
            String url = handleURLStem + comm.getHandle();
            String url = uiURLStem + "/communities/" + comm.getID();

            if (makeHTMLMap) {
                html.addURL(url, null);
@@ -197,7 +227,7 @@ public class GenerateSitemaps {
        List<Collection> colls = collectionService.findAll(c);

        for (Collection coll : colls) {
            String url = handleURLStem + coll.getHandle();
            String url = uiURLStem + "/collections/" + coll.getID();

            if (makeHTMLMap) {
                html.addURL(url, null);
@@ -214,14 +244,37 @@ public class GenerateSitemaps {

        while (allItems.hasNext()) {
            Item i = allItems.next();
            String url = handleURLStem + i.getHandle();
            Date lastMod = i.getLastModified();

            if (makeHTMLMap) {
                html.addURL(url, lastMod);
            }
            if (makeSitemapOrg) {
                sitemapsOrg.addURL(url, lastMod);
            DiscoverQuery entityQuery = new DiscoverQuery();
            entityQuery.setQuery("search.uniqueid:\"Item-" + i.getID() + "\" and entityType:*");
            entityQuery.addSearchField("entityType");

            try {
                DiscoverResult discoverResult = searchService.search(c, entityQuery);

                String url;
                if (CollectionUtils.isNotEmpty(discoverResult.getIndexableObjects())
                    && CollectionUtils.isNotEmpty(discoverResult.getSearchDocument(
                        discoverResult.getIndexableObjects().get(0)).get(0).getSearchFieldValues("entityType"))
                    && StringUtils.isNotBlank(discoverResult.getSearchDocument(
                        discoverResult.getIndexableObjects().get(0)).get(0).getSearchFieldValues("entityType").get(0))
                ) {
                    url = uiURLStem + "/entities/" + StringUtils.lowerCase(discoverResult.getSearchDocument(
                        discoverResult.getIndexableObjects().get(0))
                        .get(0).getSearchFieldValues("entityType").get(0)) + "/" + i.getID();
                } else {
                    url = uiURLStem + "/items/" + i.getID();
                }
                Date lastMod = i.getLastModified();

                if (makeHTMLMap) {
                    html.addURL(url, lastMod);
                }
                if (makeSitemapOrg) {
                    sitemapsOrg.addURL(url, lastMod);
                }
            } catch (SearchServiceException e) {
                log.error("Failed getting entitytype through solr for item " + i.getID() + ": " + e.getMessage());
            }

            c.uncacheEntity(i);
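The entityType branch above boils down to a small URL-selection rule. A JDK-only sketch of that rule for illustration; the method and parameter names here are hypothetical, not part of GenerateSitemaps:

import java.util.Locale;
import java.util.UUID;

public class SitemapItemUrlSketch {
    /** Items with an entityType in Solr get an /entities/ URL; all others get /items/. */
    static String itemUrl(String uiURLStem, String entityType, UUID itemId) {
        if (entityType != null && !entityType.isBlank()) {
            return uiURLStem + "/entities/" + entityType.toLowerCase(Locale.ROOT) + "/" + itemId;
        }
        return uiURLStem + "/items/" + itemId;
    }

    public static void main(String[] args) {
        UUID id = UUID.fromString("c5a40e0e-8f4e-4f8e-9f3a-000000000000"); // hypothetical UUID
        System.out.println(itemUrl("https://demo.dspace.org", "Publication", id));
        // https://demo.dspace.org/entities/publication/c5a40e0e-8f4e-4f8e-9f3a-000000000000
        System.out.println(itemUrl("https://demo.dspace.org", null, id));
        // https://demo.dspace.org/items/c5a40e0e-8f4e-4f8e-9f3a-000000000000
    }
}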
@@ -293,7 +346,7 @@ public class GenerateSitemaps {
                                        .getProperty("http.proxy.port"));
        }

        String sitemapURL = configurationService.getProperty("dspace.url")
        String sitemapURL = configurationService.getProperty("dspace.ui.url")
            + "/sitemap";

        URL url = new URL(engineURL + URLEncoder.encode(sitemapURL, "UTF-8"));
@@ -59,7 +59,7 @@ public class SitemapsOrgGenerator extends AbstractGenerator {

    @Override
    public String getFilename(int number) {
        return "sitemap" + number + ".xml.gz";
        return "sitemap" + number + ".xml";
    }

    @Override
@@ -100,12 +100,12 @@ public class SitemapsOrgGenerator extends AbstractGenerator {

    @Override
    public boolean useCompression() {
        return true;
        return false;
    }

    @Override
    public String getIndexFilename() {
        return "sitemap_index.xml.gz";
        return "sitemap_index.xml";
    }

    @Override
@@ -16,10 +16,11 @@ import java.util.Properties;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;

/**
 * This class allows the running of the DSpace statistic tools
@@ -56,7 +57,7 @@ public class CreateStatReport {
    /**
     * File suffix for log files
     */
    private static String outputSuffix = ".dat";
    private static final String outputSuffix = ".dat";

    /**
     * User context
@@ -66,9 +67,6 @@ public class CreateStatReport {
    /**
     * the config file from which to configure the analyser
     */
    private static String configFile = ConfigurationManager.getProperty("dspace.dir") +
        File.separator + "config" + File.separator +
        "dstat.cfg";

    /**
     * Default constructor
@@ -81,8 +79,12 @@ public class CreateStatReport {
     * Usage: java CreateStatReport -r <statistic to run>
     */
    public static void main(String[] argv) throws Exception {
        ConfigurationService configurationService
            = DSpaceServicesFactory.getInstance().getConfigurationService();

        // Open the statistics config file
        final String configFile = configurationService.getProperty("dspace.dir")
            + File.separator + "config" + File.separator + "dstat.cfg";
        FileInputStream fis = new java.io.FileInputStream(new File(configFile));
        Properties config = new Properties();
        config.load(fis);
@@ -108,11 +110,11 @@ public class CreateStatReport {
        context.turnOffAuthorisationSystem();

        //get paths to directories
        outputLogDirectory = ConfigurationManager.getProperty("log.report.dir") + File.separator;
        outputReportDirectory = ConfigurationManager.getProperty("report.dir") + File.separator;
        outputLogDirectory = configurationService.getProperty("log.report.dir") + File.separator;
        outputReportDirectory = configurationService.getProperty("report.dir") + File.separator;

        //read in command line variable to determine which statistic to run
        CommandLineParser parser = new PosixParser();
        CommandLineParser parser = new DefaultParser();
        Options options = new Options();
        options.addOption("r", "report", true, "report");
        CommandLine line = parser.parse(options, argv);
@@ -405,6 +407,5 @@ public class CreateStatReport {
        System.out.println(
            "Available: <stat-initial> <stat-general> <stat-monthly> <stat-report-initial> <stat-report-general> " +
            "<stat-report-monthly>");
        return;
    }
}
@@ -20,7 +20,8 @@ import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.dspace.core.ConfigurationManager;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;

/**
 * This class provides HTML reports for the ReportGenerator class
@@ -34,7 +35,7 @@ public class HTMLReport implements Report {
    /**
     * a list of the statistic blocks being managed by this class
     */
    private List<Statistics> blocks = new ArrayList<Statistics>();
    private final List<Statistics> blocks = new ArrayList<>();

    /**
     * the title for the page
@@ -59,16 +60,23 @@ public class HTMLReport implements Report {
    /**
     * the output file to which to write aggregation data
     */
    private String output = ConfigurationManager.getProperty("dspace.dir") +
        File.separator + "log" + File.separator + "report";
    private String output;

    /**
     * constructor for HTML reporting
     * Output file path is set to {@code ${dspace.dir}/log/report}.
     */
    public HTMLReport() {
        // empty constructor
        ConfigurationService configurationService
            = DSpaceServicesFactory.getInstance().getConfigurationService();
        output = configurationService.getProperty("dspace.dir")
            + File.separator + "log" + File.separator + "report";
    }

    /**
     * Set a non-default output file path.
     *
     * @param newOutput new path to the report.
     */
    public void setOutput(String newOutput) {
        if (newOutput != null) {
            output = newOutput;
@@ -82,7 +90,7 @@ public class HTMLReport implements Report {
     */
    @Override
    public String render() {
        StringBuffer frag = new StringBuffer();
        StringBuilder frag = new StringBuilder();

        // get the page headings
        frag.append(header(pageTitle));
@@ -140,7 +148,7 @@ public class HTMLReport implements Report {
     * @return an HTML string providing internal page navigation
     */
    public String navigation() {
        StringBuffer frag = new StringBuffer();
        StringBuilder frag = new StringBuilder();

        frag.append("<div class=\"reportNavigation\">");
        frag.append("<a href=\"#general_overview\">General Overview</a>");
@@ -173,7 +181,6 @@ public class HTMLReport implements Report {
    @Override
    public void addBlock(Statistics stat) {
        blocks.add(stat);
        return;
    }


@@ -207,7 +214,7 @@ public class HTMLReport implements Report {
     */
    @Override
    public String dateRange() {
        StringBuffer frag = new StringBuffer();
        StringBuilder frag = new StringBuilder();
        DateFormat df = DateFormat.getDateInstance();

        frag.append("<div class=\"reportDate\">");
@@ -255,7 +262,6 @@ public class HTMLReport implements Report {
        if (pageTitle == null) {
            pageTitle = mainTitle;
        }
        return;
    }


@@ -280,7 +286,7 @@ public class HTMLReport implements Report {
        // FIXME: this need to be figured out to integrate nicely into the
        // whole JSTL thing, but for the moment it's just going to deliver
        // some styles
        StringBuffer frag = new StringBuffer();
        StringBuilder frag = new StringBuilder();

        frag.append("<style type=\"text/css\">\n");
        frag.append("body { font-family: Arial, Helvetica, sans-serif }");
@@ -334,7 +340,7 @@ public class HTMLReport implements Report {
     */
    @Override
    public String statBlock(Statistics content) {
        StringBuffer frag = new StringBuffer();
        StringBuilder frag = new StringBuilder();
        Stat[] stats = content.getStats();

        // start the table
@@ -345,14 +351,14 @@ public class HTMLReport implements Report {
        frag.append("\t<tr>\n");
        frag.append("\t\t<th>\n");
        if (content.getStatName() != null) {
            frag.append("\t\t\t" + content.getStatName() + "\n");
            frag.append("\t\t\t").append(content.getStatName()).append("\n");
        } else {
            frag.append("\t\t\t&nbsp;\n");
        }
        frag.append("\t\t</th>\n");
        frag.append("\t\t<th>\n");
        if (content.getResultName() != null) {
            frag.append("\t\t\t" + content.getResultName() + "\n");
            frag.append("\t\t\t").append(content.getResultName()).append("\n");
        } else {
            frag.append("\t\t\t&nbsp;\n");
        }
@@ -370,10 +376,10 @@ public class HTMLReport implements Report {
            style = "reportEvenRow";
        }

        frag.append("\t<tr class=\"" + style + "\">\n\t\t<td>\n");
        frag.append("\t<tr class=\"").append(style).append("\">\n\t\t<td>\n");
        frag.append("\t\t\t");
        if (stats[i].getReference() != null) {
            frag.append("<a href=\"" + stats[i].getReference() + "\" ");
            frag.append("<a href=\"").append(stats[i].getReference()).append("\" ");
            frag.append("target=\"_blank\">");
        }
        frag.append(this.clean(stats[i].getKey()));
@@ -405,9 +411,9 @@ public class HTMLReport implements Report {
    @Override
    public String floorInfo(int floor) {
        if (floor > 0) {
            StringBuffer frag = new StringBuffer();
            StringBuilder frag = new StringBuilder();
            frag.append("<div class=\"reportFloor\">");
            frag.append("(more than " + ReportTools.numberFormat(floor) + " times)");
            frag.append("(more than ").append(ReportTools.numberFormat(floor)).append(" times)");
            frag.append("</div>\n");
            return frag.toString();
        } else {
@@ -419,12 +425,12 @@ public class HTMLReport implements Report {
     * output the explanation of the report block in HTML format
     *
     * @param explanation some text explaining the coming report block
     * @return a string containing an explanaton HTML formatted
     * @return a string containing an explanation HTML formatted
     */
    @Override
    public String blockExplanation(String explanation) {
        if (explanation != null) {
            StringBuffer frag = new StringBuffer();
            StringBuilder frag = new StringBuilder();
            frag.append("<div class=\"reportExplanation\">");
            frag.append(explanation);
            frag.append("</div>\n\n");
@@ -30,12 +30,14 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.lang3.StringUtils;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.core.Utils;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.SearchUtils;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;

/**
 * This class performs all the actual analysis of a given set of DSpace log
@@ -267,7 +269,7 @@ public class LogAnalyser {
    /**
     * the log directory to be analysed
     */
    private static String logDir = ConfigurationManager.getProperty("log.report.dir");
    private static String logDir;

    /**
     * the regex to describe the file name format
@@ -275,16 +277,14 @@ public class LogAnalyser {
    private static String fileTemplate = "dspace\\.log.*";

    /**
     * the config file from which to configure the analyser
     * the configuration file from which to configure the analyser
     */
    private static String configFile = ConfigurationManager.getProperty("dspace.dir") +
        File.separator + "config" + File.separator +
        "dstat.cfg";
    private static String configFile;

    /**
     * the output file to which to write aggregation data
     */
    private static String outFile = ConfigurationManager.getProperty("log.report.dir") + File.separator + "dstat.dat";
    private static String outFile;

    /**
     * the starting date of the report
@@ -581,9 +581,11 @@ public class LogAnalyser {
        }

        // now do the host name and url lookup
        hostName = ConfigurationManager.getProperty("dspace.hostname").trim();
        name = ConfigurationManager.getProperty("dspace.name").trim();
        url = ConfigurationManager.getProperty("dspace.url").trim();
        ConfigurationService configurationService
            = DSpaceServicesFactory.getInstance().getConfigurationService();
        hostName = Utils.getHostName(configurationService.getProperty("dspace.ui.url"));
        name = configurationService.getProperty("dspace.name").trim();
        url = configurationService.getProperty("dspace.ui.url").trim();
        if ((url != null) && (!url.endsWith("/"))) {
            url = url + "/";
        }
@@ -621,8 +623,13 @@ public class LogAnalyser {
                                     String myConfigFile, String myOutFile,
                                     Date myStartDate, Date myEndDate,
                                     boolean myLookUp) {
        ConfigurationService configurationService
            = DSpaceServicesFactory.getInstance().getConfigurationService();

        if (myLogDir != null) {
            logDir = myLogDir;
        } else {
            logDir = configurationService.getProperty("log.report.dir");
        }

        if (myFileTemplate != null) {
@@ -631,6 +638,9 @@ public class LogAnalyser {

        if (myConfigFile != null) {
            configFile = myConfigFile;
        } else {
            configFile = configurationService.getProperty("dspace.dir")
                + File.separator + "config" + File.separator + "dstat.cfg";
        }

        if (myStartDate != null) {
@@ -643,9 +653,9 @@ public class LogAnalyser {

        if (myOutFile != null) {
            outFile = myOutFile;
        } else {
            outFile = configurationService.getProperty("log.report.dir") + File.separator + "dstat.dat";
        }

        return;
    }


@@ -656,7 +666,7 @@ public class LogAnalyser {
     */
    public static String createOutput() {
        // start a string buffer to hold the final output
        StringBuffer summary = new StringBuffer();
        StringBuilder summary = new StringBuilder();

        // define an iterator that will be used to go over the hashmap keys
        Iterator<String> keys = null;
@@ -819,7 +829,7 @@ public class LogAnalyser {
     */
    public static void setRegex(String fileTemplate) {
        // build the exclude characters regular expression
        StringBuffer charRegEx = new StringBuffer();
        StringBuilder charRegEx = new StringBuilder();
        charRegEx.append("[");
        for (int i = 0; i < excludeChars.size(); i++) {
            charRegEx.append("\\").append(excludeChars.get(i));
@@ -863,7 +873,7 @@ public class LogAnalyser {
        logRegex = Pattern.compile(fileTemplate);

        // set up the pattern for matching any of the query types
        StringBuffer typeRXString = new StringBuffer();
        StringBuilder typeRXString = new StringBuilder();
        typeRXString.append("(");
        for (int i = 0; i < excludeTypes.size(); i++) {
            if (i > 0) {
@@ -875,7 +885,7 @@ public class LogAnalyser {
        typeRX = Pattern.compile(typeRXString.toString());

        // set up the pattern for matching any of the words to exclude
        StringBuffer wordRXString = new StringBuffer();
        StringBuilder wordRXString = new StringBuilder();
        wordRXString.append("(");
        for (int i = 0; i < excludeWords.size(); i++) {
            if (i > 0) {
@@ -889,8 +899,6 @@ public class LogAnalyser {
        }
        wordRXString.append(")");
        wordRX = Pattern.compile(wordRXString.toString());

        return;
    }

    /**
@@ -919,18 +927,18 @@ public class LogAnalyser {
     */
    public static void readConfig(String configFile) throws IOException {
        //instantiate aggregators
        actionAggregator = new HashMap<String, Integer>();
        searchAggregator = new HashMap<String, Integer>();
        userAggregator = new HashMap<String, Integer>();
        itemAggregator = new HashMap<String, Integer>();
        archiveStats = new HashMap<String, Integer>();
        actionAggregator = new HashMap<>();
        searchAggregator = new HashMap<>();
        userAggregator = new HashMap<>();
        itemAggregator = new HashMap<>();
        archiveStats = new HashMap<>();

        //instantiate lists
        generalSummary = new ArrayList<String>();
        excludeWords = new ArrayList<String>();
        excludeTypes = new ArrayList<String>();
        excludeChars = new ArrayList<String>();
        itemTypes = new ArrayList<String>();
        generalSummary = new ArrayList<>();
        excludeWords = new ArrayList<>();
        excludeTypes = new ArrayList<>();
        excludeChars = new ArrayList<>();
        itemTypes = new ArrayList<>();

        // prepare our standard file readers and buffered readers
        FileReader fr = null;
@@ -1001,8 +1009,6 @@ public class LogAnalyser {
        // close the inputs
        br.close();
        fr.close();

        return;
    }

    /**

@@ -28,14 +28,15 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.dspace.content.Item;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;

/**
 * This class performs the action of coordinating a usage report being
@@ -161,7 +162,7 @@ public class ReportGenerator {
    /**
     * pattern that matches an unqualified aggregator property
     */
    private static Pattern real = Pattern.compile("^(.+)=(.+)");
    private static final Pattern real = Pattern.compile("^(.+)=(.+)");

    //////////////////////////
    // Miscellaneous variables
@@ -189,11 +190,12 @@ public class ReportGenerator {
    /**
     * the log file action to human readable action map
     */
    private static String map = ConfigurationManager.getProperty("dspace.dir") +
        File.separator + "config" + File.separator + "dstat.map";
    private static String map;

    private static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
    private static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
    private static final ConfigurationService configurationService
        = DSpaceServicesFactory.getInstance().getConfigurationService();

    /**
     * Default constructor
@@ -268,6 +270,9 @@ public class ReportGenerator {
        throws Exception, SQLException {
        if (myMap != null) {
            map = myMap;
        } else {
            map = configurationService.getProperty("dspace.dir")
                + File.separator + "config" + File.separator + "dstat.map";
        }

        // create the relevant report type
@@ -302,15 +307,15 @@ public class ReportGenerator {
        startTime = new GregorianCalendar();

        /** instantiate aggregators */
        actionAggregator = new HashMap<String, String>();
        searchAggregator = new HashMap<String, String>();
        userAggregator = new HashMap<String, String>();
        itemAggregator = new HashMap<String, String>();
        archiveStats = new HashMap<String, String>();
        actionMap = new HashMap<String, String>();
        actionAggregator = new HashMap<>();
        searchAggregator = new HashMap<>();
        userAggregator = new HashMap<>();
        itemAggregator = new HashMap<>();
        archiveStats = new HashMap<>();
        actionMap = new HashMap<>();

        /** instantite lists */
        generalSummary = new ArrayList<String>();
        /** instantiate lists */
        generalSummary = new ArrayList<>();

        // set the parameters for this analysis
        setParameters(myInput);
@@ -486,8 +491,6 @@ public class ReportGenerator {
        report.addBlock(process);

        report.render();

        return;
    }


@@ -612,8 +615,6 @@ public class ReportGenerator {
        if (myInput != null) {
            input = myInput;
        }

        return;
    }


@@ -763,13 +764,14 @@ public class ReportGenerator {
        // build the reference
        // FIXME: here we have blurred the line between content and presentation
        // and it should probably be un-blurred
        List<MetadataValue> title = itemService.getMetadata(item, MetadataSchema.DC_SCHEMA, "title", null, Item.ANY);
        List<MetadataValue> title = itemService.getMetadata(item, MetadataSchemaEnum.DC.getName(),
            "title", null, Item.ANY);
        List<MetadataValue> author = itemService
            .getMetadata(item, MetadataSchema.DC_SCHEMA, "contributor", "author", Item.ANY);
            .getMetadata(item, MetadataSchemaEnum.DC.getName(), "contributor", "author", Item.ANY);

        StringBuffer authors = new StringBuffer();
        StringBuilder authors = new StringBuilder();
        if (author.size() > 0) {
            authors.append("(" + author.get(0).getValue());
            authors.append("(").append(author.get(0).getValue());
        }
        if (author.size() > 1) {
            authors.append(" et al");

@@ -15,7 +15,7 @@ package org.dspace.app.statistics;
 *
 * @author Richard Jones
 */
public class Stat implements Comparable {
public class Stat implements Comparable<Stat> {
    // FIXME: this class is functional but a bit messy, and should be neatened
    // up and completed

@@ -132,17 +132,17 @@ public class Stat implements Comparable {


    /**
     * compare the current object to the given object returning -1 if o is less
     * than the current object, 0 if they are the same, and +1 if o is greater
     * than the current object.
     * Compare the current Stat to the given Stat returning -1 if o is less
     * than the current Stat, 0 if they are the same, and +1 if o is greater
     * than the current Stat.
     *
     * @param o the object to compare to the current one
     * @param stat the Stat object to compare to the current one
     * @return +1, 0, -1 if o is less than, equal to, or greater than the
     *         current object value.
     */
    @Override
    public int compareTo(Object o) {
        int objectValue = ((Stat) o).getValue();
    public int compareTo(Stat stat) {
        int objectValue = stat.getValue();

        if (objectValue < this.getValue()) {
            return -1;
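One consequence of the comparison above is worth spelling out: a larger value compares as "less", so natural ordering sorts Stat objects descending by value. A sketch of that behaviour, assuming a simple (key, value) constructor purely for illustration (the actual constructors are outside this diff):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class StatOrderingSketch {
    public static void main(String[] args) {
        // Hypothetical (key, value) constructor - for illustration only
        List<Stat> stats = new ArrayList<>();
        stats.add(new Stat("browse", 3));
        stats.add(new Stat("search", 10));
        Collections.sort(stats);
        // "search" (value 10) sorts first: compareTo returns -1 when the
        // other Stat's value is smaller, i.e. descending order by value
        System.out.println(stats.get(0).getKey()); // search
    }
}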
Some files were not shown because too many files have changed in this diff.