Merge branch 'main' into DS-2670

Mark H. Wood
2020-08-04 15:17:11 -04:00
2841 changed files with 239240 additions and 123372 deletions

11
.dockerignore Normal file

@@ -0,0 +1,11 @@
.git/
.idea/
.settings/
*/target/
dspace/modules/*/target/
Dockerfile.*
dspace/src/main/docker/dspace-postgres-pgcrypto
dspace/src/main/docker/dspace-postgres-pgcrypto-curl
dspace/src/main/docker/solr
dspace/src/main/docker/README.md
dspace/src/main/docker-compose/

6
.gitattributes vendored

@@ -1,6 +1,12 @@
# Auto detect text files and perform LF normalization
* text=auto
+# Ensure Unix files always keep Unix line endings
+*.sh text eol=lf
+# Ensure Windows files always keep Windows line endings
+*.bat text eol=crlf
# Standard to msysgit
*.doc diff=astextplain
*.DOC diff=astextplain

22
.github/ISSUE_TEMPLATE/bug_report.md vendored Normal file

@@ -0,0 +1,22 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug, needs triage
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is. Include the version(s) of DSpace where you've seen this problem. Link to examples if they are public.
**To Reproduce**
Steps to reproduce the behavior:
1. Do this
2. Then this...
**Expected behavior**
A clear and concise description of what you expected to happen.
**Related work**
Link to any related tickets or PRs here.

20
.github/ISSUE_TEMPLATE/feature_request.md vendored Normal file

@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest a new feature for this project
title: ''
labels: new feature, needs triage
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives or workarounds you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

26
.github/workflows/pull_request_opened.yml vendored Normal file

@@ -0,0 +1,26 @@
# This workflow runs whenever a new pull request is created
# TEMPORARILY DISABLED. Unfortunately this doesn't work for PRs created from forked repositories (which is how we tend to create PRs).
# There is no known workaround yet. See https://github.community/t/how-to-use-github-token-for-prs-from-forks/16818
name: Pull Request opened
# Only run for newly opened PRs against the "main" branch
on:
pull_request:
types: [opened]
branches:
- main
jobs:
automation:
runs-on: ubuntu-latest
steps:
# Assign the PR to whomever created it. This is useful for visualizing assignments on project boards
# See https://github.com/marketplace/actions/pull-request-assigner
- name: Assign PR to creator
uses: thomaseizinger/assign-pr-creator-action@v1.0.0
# Note, this authentication token is created automatically
# See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
# Ignore errors. It is possible the PR was created by someone who cannot be assigned
continue-on-error: true

26
.github/pull_request_template.md vendored Normal file

@@ -0,0 +1,26 @@
## References
_Add references/links to any related issues or PRs. These may include:_
* Related to [REST Contract](https://github.com/DSpace/Rest7Contract) or an open REST Contract PR, if any
* Fixes [GitHub issue](https://github.com/DSpace/DSpace/issues), if any
## Description
Short summary of changes (1-2 sentences).
## Instructions for Reviewers
Please add a more detailed description of the changes made by your PR. At a minimum, providing a bulleted list of changes in your PR is helpful to reviewers.
List of changes in this PR:
* First, ...
* Second, ...
**Include guidance for how to test or review your PR.** This may include: steps to reproduce a bug, screenshots or description of a new feature, or reasons behind specific changes.
## Checklist
_This checklist provides a reminder of what we are going to look for when reviewing your PR. You need not complete this checklist prior to creating your PR (draft PRs are always welcome). If you are unsure about an item in the checklist, don't hesitate to ask. We're here to help!_
- [ ] My PR is small in size (e.g. less than 1,000 lines of code, not including comments & integration tests). Exceptions may be made if previously agreed upon.
- [ ] My PR passes Checkstyle validation based on the [Code Style Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Style+Guide).
- [ ] My PR includes Javadoc for _all new (or modified) public methods and classes_. It also includes Javadoc for large or complex private methods.
- [ ] My PR passes all tests and includes new/updated Unit or Integration Tests based on the [Code Testing Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Testing+Guide).
- [ ] If my PR includes new, third-party dependencies (in any `pom.xml`), I've made sure their licenses align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation.
- [ ] If my PR modifies the REST API, I've linked to the REST Contract page (or open PR) related to this change.

29
.github/workflows/issue_opened.yml vendored Normal file

@@ -0,0 +1,29 @@
# This workflow runs whenever a new issue is created
name: Issue opened
on:
issues:
types: [opened]
jobs:
automation:
runs-on: ubuntu-latest
steps:
# Add the new issue to a project board, if it needs triage
# See https://github.com/marketplace/actions/create-project-card-action
- name: Add issue to project board
# Only add to project board if issue is flagged as "needs triage" or has no labels
# NOTE: By default we flag new issues as "needs triage" in our issue template
if: (contains(github.event.issue.labels.*.name, 'needs triage') || join(github.event.issue.labels.*.name) == '')
uses: technote-space/create-project-card-action@v1
# Note, the authentication token below is an ORG level Secret.
# It must be created/recreated manually via a personal access token with "public_repo" and "admin:org" permissions
# See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token#permissions-for-the-github_token
# This is necessary because the "DSpace Backlog" project is an org level project (i.e. not repo specific)
with:
GITHUB_TOKEN: ${{ secrets.ORG_PROJECT_TOKEN }}
PROJECT: DSpace Backlog
COLUMN: Triage
CHECK_ORG_PROJECT: true
# Ignore errors.
continue-on-error: true

5
.gitignore vendored

@@ -9,6 +9,7 @@ tags
/bin/
.project
.classpath
+.checkstyle
## Ignore project files created by IntelliJ IDEA
*.iml
@@ -25,7 +26,6 @@ dist/
nbdist/
nbactions.xml
nb-configuration.xml
-META-INF/
## Ignore all *.properties file in root folder, EXCEPT build.properties (the default)
## KEPT FOR BACKWARDS COMPATIBILITY WITH 5.x (build.properties is now replaced with local.cfg)
@@ -39,3 +39,6 @@ META-INF/
##Mac noise
.DS_Store
+##Ignore JRebel project configuration
+rebel.xml

9
.lgtm.yml Normal file

@@ -0,0 +1,9 @@
# LGTM Settings (https://lgtm.com/)
# For reference, see https://lgtm.com/help/lgtm/lgtm.yml-configuration-file
# or template at https://lgtm.com/static/downloads/lgtm.template.yml
extraction:
java:
index:
# Specify the Java version required to build the project
java_version: 11

.travis.yml

@@ -1,14 +1,15 @@
language: java
sudo: false
+dist: trusty
env:
# Give Maven 1GB of memory to work with
- MAVEN_OPTS=-Xmx1024M
jdk:
-# DS-3384 Oracle JDK 8 has DocLint enabled by default.
+# DS-3384 Oracle JDK has DocLint enabled by default.
# Let's use this to catch any newly introduced DocLint issues.
-- oraclejdk8
+- oraclejdk11
## Should we run into any problems with oraclejdk8 on Travis, we may try the following workaround.
## https://docs.travis-ci.com/user/languages/java#Testing-Against-Multiple-JDKs
@@ -18,7 +19,6 @@ jdk:
# packages:
# - oracle-java8-installer
-# Install prerequisites for building Mirage2 more rapidly
before_install:
# Remove outdated settings.xml from Travis builds. Workaround for https://github.com/travis-ci/travis-ci/issues/4629
- rm ~/.m2/settings.xml
@@ -26,19 +26,21 @@ before_install:
# Skip install stage, as we'll do it below
install: "echo 'Skipping install stage, dependencies will be downloaded during build and test stages.'"
-# Two stage Build and Test
-# 1. Install & Unit Test APIs
-# 2. Assemble DSpace
+# Build DSpace and run both Unit and Integration Tests
script:
-# 1. [Install & Unit Test] Check source code licenses and run source code Unit Tests
+# Summary of flags used (below):
# license:check => Validate all source code license headers
-# -Dmaven.test.skip=false => Enable DSpace Unit Tests
+# -DskipTests=false => Enable DSpace Unit Tests
# -DskipITs=false => Enable DSpace Integration Tests
-# -P !assembly => Skip normal assembly (as it can be memory intensive)
-# -B => Maven batch/non-interactive mode (recommended for CI)
-# -V => Display Maven version info before build
-# -Dsurefire.rerunFailingTestsCount=2 => try again for flakey tests, and keep track of/report on number of retries
-- "mvn clean install license:check -Dmaven.test.skip=false -DskipITs=false -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
-# 2. [Assemble DSpace] Ensure overlay & assembly process works (from [src]/dspace/)
-# -P !assembly => SKIP the actual building of [src]/dspace/dspace-installer (as it can be memory intensive)
-- "cd dspace && mvn package -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
+# -Pdspace-rest => Enable optional dspace-rest module as part of build
+# -P !assembly => Skip assembly of "dspace-installer" directory (as it can be memory intensive)
+# -B => Maven batch/non-interactive mode (recommended for CI)
+# -V => Display Maven version info before build
+# -Dsurefire.rerunFailingTestsCount=2 => try again for flakey tests, and keep track of/report on number of retries
+- "mvn clean install license:check -DskipTests=false -DskipITs=false -Pdspace-rest -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
+# After a successful build and test (see 'script'), send code coverage reports to coveralls.io
+# These code coverage reports are generated by jacoco-maven-plugin (during test process above).
+after_success:
+# Run "verify", enabling the "coveralls" profile. This sends our reports to coveralls.io (see coveralls-maven-plugin)
+- "cd dspace && mvn verify -P coveralls"

63
Dockerfile Normal file

@@ -0,0 +1,63 @@
# This image will be published as dspace/dspace
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
#
# This version is JDK11 compatible
# - tomcat:8-jdk11
# - ANT 1.10.7
# - maven:3-jdk-11 (see dspace-dependencies)
# - note: default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x
# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-install directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install \
&& chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
# Build DSpace (note: this build doesn't include the optional, deprecated "dspace-rest" webapp)
# Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
RUN mvn package && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Step 2 - Run Ant Deploy
FROM tomcat:8-jdk11 as ant_build
ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.7
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
RUN mkdir $ANT_HOME && \
wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
RUN ant init_installation update_configs update_code update_webapps
# Step 3 - Run tomcat
# Create a new tomcat image that does not retain the build directory contents
FROM tomcat:8-jdk11
ENV DSPACE_INSTALL=/dspace
COPY --from=ant_build /dspace $DSPACE_INSTALL
EXPOSE 8080 8009
ENV JAVA_OPTS=-Xmx2000m
# Run the "server" webapp off the /server path (e.g. http://localhost:8080/server/)
RUN ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/server
# If you wish to run "server" webapp off the ROOT path, then comment out the above RUN, and uncomment the below RUN.
# You also MUST update the URL in dspace/src/main/docker/local.cfg
# Please note that the server webapp should only run on one path at a time.
#RUN mv /usr/local/tomcat/webapps/ROOT /usr/local/tomcat/webapps/ROOT.bk && \
# ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/ROOT

53
Dockerfile.cli Normal file

@@ -0,0 +1,53 @@
# This image will be published as dspace/dspace-cli
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
#
# This version is JDK11 compatible
# - openjdk:11
# - ANT 1.10.7
# - maven:3-jdk-11 (see dspace-dependencies)
# - note: default tag for branch: dspace/dspace-cli: dspace/dspace-cli:dspace-7_x
# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-install directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install \
&& chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
# Build DSpace. Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
RUN mvn package && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Step 2 - Run Ant Deploy
FROM openjdk:11 as ant_build
ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.7
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
RUN mkdir $ANT_HOME && \
wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
RUN ant init_installation update_configs update_code
# Step 3 - Run jdk
# Create a new JDK image that does not retain the build directory contents
FROM openjdk:11
ENV DSPACE_INSTALL=/dspace
COPY --from=ant_build /dspace $DSPACE_INSTALL
ENV JAVA_OPTS=-Xmx1000m

27
Dockerfile.dependencies Normal file

@@ -0,0 +1,27 @@
# This image will be published as dspace/dspace-dependencies
# The purpose of this image is to make the build for dspace/dspace run faster
#
# This version is JDK11 compatible
# - maven:3-jdk-11
# Step 1 - Run Maven Build
FROM maven:3-jdk-11 as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
RUN useradd dspace \
&& mkdir /home/dspace \
&& chown -Rv dspace: /home/dspace
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
# Trigger the installation of all maven dependencies
RUN mvn package
# Clear the contents of the /app directory (including all maven builds), so no artifacts remain.
# This ensures that when dspace:dspace is built, it will use just the Maven local cache (.m2) for dependencies
USER root
RUN rm -rf /app/*

72
Dockerfile.test Normal file

@@ -0,0 +1,72 @@
# This image will be published as dspace/dspace
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
#
# This version is JDK11 compatible
# - tomcat:8-jdk11
# - ANT 1.10.7
# - maven:3-jdk-11 (see dspace-dependencies)
# - note: default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x-test
#
# This image is meant for TESTING/DEVELOPMENT ONLY as it deploys the old v6 REST API under HTTP (not HTTPS)
# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-install directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install \
&& chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
# Build DSpace (including the optional, deprecated "dspace-rest" webapp)
# Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
RUN mvn package -Pdspace-rest && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Step 2 - Run Ant Deploy
FROM tomcat:8-jdk11 as ant_build
ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.7
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
RUN mkdir $ANT_HOME && \
wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
RUN ant init_installation update_configs update_code update_webapps
# Step 3 - Run tomcat
# Create a new tomcat image that does not retain the build directory contents
FROM tomcat:8-jdk11
ENV DSPACE_INSTALL=/dspace
COPY --from=ant_build /dspace $DSPACE_INSTALL
EXPOSE 8080 8009
ENV JAVA_OPTS=-Xmx2000m
# Run the "server" webapp off the /server path (e.g. http://localhost:8080/server/)
# and the v6.x (deprecated) REST API off the "/rest" path
RUN ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/server && \
ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest
# If you wish to run "server" webapp off the ROOT path, then comment out the above RUN, and uncomment the below RUN.
# You also MUST update the URL in dspace/src/main/docker/local.cfg
# Please note that the server webapp should only run on one path at a time.
#RUN mv /usr/local/tomcat/webapps/ROOT /usr/local/tomcat/webapps/ROOT.bk && \
# ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/ROOT && \
# ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest
# Overwrite the v6.x (deprecated) REST API's web.xml, so that we can run it on HTTP (defaults to requiring HTTPS)
COPY dspace/src/main/docker/test/rest_web.xml $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml
RUN sed -i -e "s|\${dspace.dir}|$DSPACE_INSTALL|" $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml

LICENSE

@@ -1,7 +1,6 @@
-DSpace source code license:
-Copyright (c) 2002-2016, DuraSpace. All rights reserved.
+DSpace source code BSD License:
+Copyright (c) 2002-2020, LYRASIS. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are

LICENSES_THIRD_PARTY

@@ -199,8 +199,6 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Woodstox (org.codehaus.woodstox:woodstox-core-asl:4.1.4 - http://woodstox.codehaus.org)
* Woodstox (org.codehaus.woodstox:wstx-asl:3.2.0 - http://woodstox.codehaus.org)
* Woodstox (org.codehaus.woodstox:wstx-asl:3.2.7 - http://woodstox.codehaus.org)
-* databene ContiPerf (org.databene:contiperf:2.3.4 - http://databene.org/contiperf)
-* elasticsearch (org.elasticsearch:elasticsearch:1.4.0 - http://nexus.sonatype.org/oss-repository-hosting.html/elasticsearch)
* flyway-core (org.flywaydb:flyway-core:4.0.3 - https://flywaydb.org/flyway-core)
* Ogg and Vorbis for Java, Core (org.gagravarr:vorbis-java-core:0.1 - https://github.com/Gagravarr/VorbisJava)
* Apache Tika plugin for Ogg, Vorbis and FLAC (org.gagravarr:vorbis-java-tika:0.1 - https://github.com/Gagravarr/VorbisJava)
@@ -300,7 +298,7 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Repackaged Cocoon Servlet Service Implementation (org.dspace.dependencies.cocoon:dspace-cocoon-servlet-service-impl:1.0.3 - http://projects.dspace.org/dspace-pom/dspace-cocoon-servlet-service-impl)
* DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
* Hamcrest All (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all)
-* Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core)
+* Hamcrest Core (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all)
* JBibTeX (org.jbibtex:jbibtex:1.0.10 - http://www.jbibtex.org)
* ASM Core (org.ow2.asm:asm:4.1 - http://asm.objectweb.org/asm/)
* ASM Analysis (org.ow2.asm:asm-analysis:4.1 - http://asm.objectweb.org/asm-analysis/)
@@ -368,7 +366,6 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* FindBugs-Annotations (com.google.code.findbugs:annotations:3.0.1u2 - http://findbugs.sourceforge.net/)
* MaxMind GeoIP Legacy API (com.maxmind.geoip:geoip-api:1.3.0 - https://github.com/maxmind/geoip-api-java)
* JHighlight (com.uwyn:jhighlight:1.0 - https://jhighlight.dev.java.net/)
-* DSpace TM-Extractors Dependency (org.dspace.dependencies:dspace-tm-extractors:1.0.1 - http://projects.dspace.org/dspace-pom/dspace-tm-extractors)
* A Hibernate O/RM Module (org.hibernate:hibernate-core:4.2.21.Final - http://hibernate.org)
* A Hibernate O/RM Module (org.hibernate:hibernate-ehcache:4.2.21.Final - http://hibernate.org)
* Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:4.0.2.Final - http://hibernate.org)

9
NOTICE

@@ -1,11 +1,14 @@
-Licensing Notice
+Licensing Notices
+=================
+[July 2019] DuraSpace joined with LYRASIS (another 501(c)3 organization) in July 2019.
+LYRASIS holds the copyrights of DuraSpace.
-Fedora Commons joined with the DSpace Foundation and began operating under
+[July 2009] Fedora Commons joined with the DSpace Foundation and began operating under
the new name DuraSpace in July 2009. DuraSpace holds the copyrights of
the DSpace Foundation, Inc.
-The DSpace Foundation, Inc. is a 501(c)3 corporation established in July 2007
+[July 2007] The DSpace Foundation, Inc. is a 501(c)3 corporation established in July 2007
with a mission to promote and advance the dspace platform enabling management,
access and preservation of digital works. The Foundation was able to transfer
the legal copyright from Hewlett-Packard Company (HP) and Massachusetts

README.md

@@ -1,24 +1,24 @@
# DSpace
-[![Build Status](https://travis-ci.org/DSpace/DSpace.png?branch=master)](https://travis-ci.org/DSpace/DSpace)
+[![Build Status](https://travis-ci.com/DSpace/DSpace.png?branch=main)](https://travis-ci.com/DSpace/DSpace)
-[DSpace Documentation](https://wiki.duraspace.org/display/DSDOC/) |
+[DSpace Documentation](https://wiki.lyrasis.org/display/DSDOC/) |
[DSpace Releases](https://github.com/DSpace/DSpace/releases) |
-[DSpace Wiki](https://wiki.duraspace.org/display/DSPACE/Home) |
+[DSpace Wiki](https://wiki.lyrasis.org/display/DSPACE/Home) |
-[Support](https://wiki.duraspace.org/display/DSPACE/Support)
+[Support](https://wiki.lyrasis.org/display/DSPACE/Support)
DSpace open source software is a turnkey repository application used by more than
2,000 organizations and institutions worldwide to provide durable access to digital resources.
For more information, visit http://www.dspace.org/
***
-:warning: **Work on DSpace 7 has begun on our `master` branch.** This means that there is temporarily NO user interface on this `master` branch. DSpace 7 will feature a new, unified [Angular](https://angular.io/) user interface, along with an enhanced, rebuilt REST API. The latest status of this work can be found on the [DSpace 7 UI Working Group](https://wiki.duraspace.org/display/DSPACE/DSpace+7+UI+Working+Group) page. Additionally, the codebases can be found in the following places:
+:warning: **Work on DSpace 7 has begun on our `main` branch.** This means that there is NO user interface on this `main` branch. DSpace 7 will feature a new, unified [Angular](https://angular.io/) user interface, along with an enhanced, rebuilt REST API. The latest status of this work can be found on the [DSpace 7 Working Group](https://wiki.lyrasis.org/display/DSPACE/DSpace+7+Working+Group) page. Additionally, the codebases can be found in the following places:
-* DSpace 7 REST API work is occurring on the [`master` branch](https://github.com/DSpace/DSpace/tree/master/dspace-spring-rest) of this repository.
+* DSpace 7 REST API work is occurring on the [`main` branch](https://github.com/DSpace/DSpace/tree/main/dspace-server-webapp) of this repository.
-* The REST Contract is being documented at https://github.com/DSpace/Rest7Contract
+* The REST Contract is at https://github.com/DSpace/Rest7Contract
* DSpace 7 Angular UI work is occurring at https://github.com/DSpace/dspace-angular
-**If you would like to get involved in our DSpace 7 development effort, we welcome new contributors.** Just join one of our meetings or get in touch via Slack. See the [DSpace 7 UI Working Group](https://wiki.duraspace.org/display/DSPACE/DSpace+7+UI+Working+Group) wiki page for more info.
+**If you would like to get involved in our DSpace 7 development effort, we welcome new contributors.** Just join one of our meetings or get in touch via Slack. See the [DSpace 7 Working Group](https://wiki.lyrasis.org/display/DSPACE/DSpace+7+Working+Group) wiki page for more info.
**If you are looking for the ongoing maintenance work for DSpace 6 (or prior releases)**, you can find that work on the corresponding maintenance branch (e.g. [`dspace-6_x`](https://github.com/DSpace/DSpace/tree/dspace-6_x)) in this repository.
***
@@ -31,29 +31,32 @@ Past releases are all available via GitHub at https://github.com/DSpace/DSpace/r
## Documentation / Installation
-Documentation for each release may be viewed online or downloaded via our [Documentation Wiki](https://wiki.duraspace.org/display/DSDOC/).
+Documentation for each release may be viewed online or downloaded via our [Documentation Wiki](https://wiki.lyrasis.org/display/DSDOC/).
The latest DSpace Installation instructions are available at:
-https://wiki.duraspace.org/display/DSDOC6x/Installing+DSpace
+https://wiki.lyrasis.org/display/DSDOC6x/Installing+DSpace
Please be aware that, as a Java web application, DSpace requires a database (PostgreSQL or Oracle)
and a servlet container (usually Tomcat) in order to function.
More information about these and all other prerequisites can be found in the Installation instructions above.
+## Running DSpace 7 in Docker
+See [Running DSpace 7 with Docker Compose](dspace/src/main/docker-compose/README.md)
## Contributing
DSpace is a community built and supported project. We do not have a centralized development or support team,
but have a dedicated group of volunteers who help us improve the software, documentation, resources, etc.
We welcome contributions of any type. Here's a few basic guides that provide suggestions for contributing to DSpace:
-* [How to Contribute to DSpace](https://wiki.duraspace.org/display/DSPACE/How+to+Contribute+to+DSpace): How to contribute in general (via code, documentation, bug reports, expertise, etc)
+* [How to Contribute to DSpace](https://wiki.lyrasis.org/display/DSPACE/How+to+Contribute+to+DSpace): How to contribute in general (via code, documentation, bug reports, expertise, etc)
-* [Code Contribution Guidelines](https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines): How to give back code or contribute features, bug fixes, etc.
+* [Code Contribution Guidelines](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines): How to give back code or contribute features, bug fixes, etc.
-* [DSpace Community Advisory Team (DCAT)](https://wiki.duraspace.org/display/cmtygp/DSpace+Community+Advisory+Team): If you are not a developer, we also have an interest group specifically for repository managers. The DCAT group meets virtually, once a month, and sends open invitations to join their meetings via the [DCAT mailing list](https://groups.google.com/d/forum/DSpaceCommunityAdvisoryTeam).
+* [DSpace Community Advisory Team (DCAT)](https://wiki.lyrasis.org/display/cmtygp/DSpace+Community+Advisory+Team): If you are not a developer, we also have an interest group specifically for repository managers. The DCAT group meets virtually, once a month, and sends open invitations to join their meetings via the [DCAT mailing list](https://groups.google.com/d/forum/DSpaceCommunityAdvisoryTeam).
-We also encourage GitHub Pull Requests (PRs) at any time. Please see our [Development with Git](https://wiki.duraspace.org/display/DSPACE/Development+with+Git) guide for more info.
+We also encourage GitHub Pull Requests (PRs) at any time. Please see our [Development with Git](https://wiki.lyrasis.org/display/DSPACE/Development+with+Git) guide for more info.
In addition, a listing of all known contributors to DSpace software can be
-found online at: https://wiki.duraspace.org/display/DSPACE/DSpaceContributors
+found online at: https://wiki.lyrasis.org/display/DSPACE/DSpaceContributors
## Getting Help
@@ -61,12 +64,12 @@ DSpace provides public mailing lists where you can post questions or raise topic
We welcome everyone to participate in these lists:
* [dspace-community@googlegroups.com](https://groups.google.com/d/forum/dspace-community) : General discussion about DSpace platform, announcements, sharing of best practices
-* [dspace-tech@googlegroups.com](https://groups.google.com/d/forum/dspace-tech) : Technical support mailing list. See also our guide for [How to troubleshoot an error](https://wiki.duraspace.org/display/DSPACE/Troubleshoot+an+error).
+* [dspace-tech@googlegroups.com](https://groups.google.com/d/forum/dspace-tech) : Technical support mailing list. See also our guide for [How to troubleshoot an error](https://wiki.lyrasis.org/display/DSPACE/Troubleshoot+an+error).
* [dspace-devel@googlegroups.com](https://groups.google.com/d/forum/dspace-devel) : Developers / Development mailing list
Great Q&A is also available under the [DSpace tag on Stackoverflow](http://stackoverflow.com/questions/tagged/dspace)
-Additional support options are listed at https://wiki.duraspace.org/display/DSPACE/Support
+Additional support options are at https://wiki.lyrasis.org/display/DSPACE/Support
DSpace also has an active service provider network. If you'd rather hire a service provider to
install, upgrade, customize or host DSpace, then we recommend getting in touch with one of our
@@ -74,9 +77,59 @@ install, upgrade, customize or host DSpace, then we recommend getting in touch w
## Issue Tracker
-The DSpace Issue Tracker can be found at: https://jira.duraspace.org/projects/DS/summary
+DSpace uses GitHub to track issues:
* Backend (REST API) issues: https://github.com/DSpace/DSpace/issues
* Frontend (User Interface) issues: https://github.com/DSpace/dspace-angular/issues
## Testing
### Running Tests
By default, in DSpace, Unit Tests and Integration Tests are disabled. However, they are
run automatically by [Travis CI](https://travis-ci.com/DSpace/DSpace/) for all Pull Requests and code commits.
* How to run both Unit Tests (via `maven-surefire-plugin`) and Integration Tests (via `maven-failsafe-plugin`):
```
mvn clean test -DskipTests=false -DskipITs=false
```
* How to run just Unit Tests:
```
mvn test -DskipTests=false
```
* How to run a *single* Unit Test
```
# Run all tests in a specific test class
# NOTE: failIfNoTests=false is required to skip tests in other modules
mvn test -DskipTests=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false
# Run one test method in a specific test class
mvn test -DskipTests=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
```
* How to run Integration Tests (requires enabling Unit tests too)
```
mvn verify -DskipTests=false -DskipITs=false
```
* How to run a *single* Integration Test (requires enabling Unit tests too)
```
# Run all integration tests in a specific test class
# NOTE: failIfNoTests=false is required to skip tests in other modules
mvn test -DskipTests=false -DskipITs=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false
# Run one test method in a specific test class
mvn test -DskipTests=false -DskipITs=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
```
* How to run only tests of a specific DSpace module
```
# Before you can run only one module's tests, other modules may need installing into your ~/.m2
cd [dspace-src]
mvn clean install
# Then, move into a module subdirectory, and run the test command
cd [dspace-src]/dspace-server-webapp
# Choose your test command from the lists above
```
## License
DSpace source code is freely available under a standard [BSD 3-Clause license](https://opensource.org/licenses/BSD-3-Clause).
-The full license is available at http://www.dspace.org/license/
+The full license is available in the [LICENSE](LICENSE) file or online at http://www.dspace.org/license/

10
checkstyle-suppressions.xml Normal file

@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE suppressions PUBLIC
"-//Puppy Crawl//DTD Suppressions 1.2//EN"
"http://checkstyle.sourceforge.net/dtds/suppressions_1_2.dtd">
<suppressions>
<!-- Temporarily suppress indentation checks for all Tests -->
<!-- TODO: We should have these turned on. But, currently there's a known bug with indentation checks
on JMockIt Expectations blocks and similar. See https://github.com/checkstyle/checkstyle/issues/3739 -->
<suppress checks="Indentation" files="src[/\\]test[/\\]java"/>
</suppressions>

142
checkstyle.xml Normal file

@@ -0,0 +1,142 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE module PUBLIC
"-//Puppy Crawl//DTD Check Configuration 1.3//EN"
"http://checkstyle.sourceforge.net/dtds/configuration_1_3.dtd">
<!--
DSpace CodeStyle Requirements
1. 4-space indents for Java, and 2-space indents for XML. NO TABS ALLOWED.
2. K&R style braces required. Braces required on all blocks.
3. Do not use wildcard imports (e.g. import java.util.*). Duplicated or unused imports also not allowed.
4. Javadocs should exist for all public classes and methods. (Methods rule is unenforced at this time.) Keep it short and to the point
5. Maximum line length is 120 characters (except for long URLs, packages or imports)
6. No trailing spaces allowed (except in comments)
7. Tokens should be surrounded by whitespace (see http://checkstyle.sourceforge.net/config_whitespace.html#WhitespaceAround)
8. Each source file must include our license header (validated separately by license-maven-plugin, see pom.xml)
For more information on CheckStyle configurations below, see: http://checkstyle.sourceforge.net/checks.html
-->
<module name="Checker">
<!-- Configure checker to use UTF-8 encoding -->
<property name="charset" value="UTF-8"/>
<!-- Configure checker to run on files with these extensions -->
<property name="fileExtensions" value="java, properties, cfg, xml"/>
<!-- Suppression configurations in checkstyle-suppressions.xml in same directory -->
<module name="SuppressionFilter">
<property name="file" value="${checkstyle.suppressions.file}" default="checkstyle-suppressions.xml"/>
</module>
<!-- No tab characters ('\t') allowed in the source code -->
<module name="FileTabCharacter">
<property name="eachLine" value="true"/>
<property name="fileExtensions" value="java, properties, cfg, css, js, xml"/>
</module>
<!-- No Trailing Whitespace, except on lines that only have an asterisk (e.g. Javadoc comments) -->
<module name="RegexpSingleline">
<property name="format" value="(?&lt;!\*)\s+$|\*\s\s+$"/>
<property name="message" value="Line has trailing whitespace"/>
<property name="fileExtensions" value="java, properties, cfg, css, js, xml"/>
</module>
<!-- Allow individual lines of code to be excluded from these rules, if they are annotated
with @SuppressWarnings. See also SuppressWarningsHolder below -->
<module name="SuppressWarningsFilter" />
<!-- Maximum line length is 120 characters -->
<module name="LineLength">
<property name="fileExtensions" value="java"/>
<property name="max" value="120"/>
<!-- Only exceptions for packages, imports, URLs, and JavaDoc {@link} tags -->
<property name="ignorePattern" value="^package.*|^import.*|http://|https://|@link"/>
</module>
<!-- Check individual Java source files for specific rules -->
<module name="TreeWalker">
<!-- Highlight any TODO or FIXME comments in info messages -->
<module name="TodoComment">
<property name="severity" value="info"/>
<property name="format" value="(TODO)|(FIXME)"/>
</module>
<!-- Do not report errors on any lines annotated with @SuppressWarnings -->
<module name="SuppressWarningsHolder"/>
<!-- ##### Import statement requirements ##### -->
<!-- Star imports (e.g. import java.util.*) are NOT ALLOWED -->
<module name="AvoidStarImport"/>
<!-- Redundant import statements are NOT ALLOWED -->
<module name="RedundantImport"/>
<!-- Unused import statements are NOT ALLOWED -->
<module name="UnusedImports"/>
<!-- Ensure imports appear alphabetically and grouped -->
<module name="CustomImportOrder">
<property name="sortImportsInGroupAlphabetically" value="true"/>
<property name="separateLineBetweenGroups" value="true"/>
<property name="customImportOrderRules" value="STATIC###STANDARD_JAVA_PACKAGE###THIRD_PARTY_PACKAGE"/>
</module>
<!-- ##### Javadocs requirements ##### -->
<!-- Requirements for Javadocs for classes/interfaces -->
<module name="JavadocType">
<!-- All public classes/interfaces MUST HAVE Javadocs -->
<property name="scope" value="public"/>
<!-- Add an exception for anonymous inner classes -->
<property name="excludeScope" value="anoninner"/>
<!-- Ignore errors related to unknown tags -->
<property name="allowUnknownTags" value="true"/>
<!-- Allow params tags to be optional -->
<property name="allowMissingParamTags" value="false"/>
</module>
<!-- Requirements for Javadocs for methods -->
<module name="JavadocMethod">
<!-- All public methods MUST HAVE Javadocs -->
<!-- <property name="scope" value="public"/> -->
<!-- TODO: Above rule has been disabled because of large amount of missing public method Javadocs -->
<property name="scope" value="nothing"/>
<!-- Allow params, throws and return tags to be optional -->
<property name="allowMissingParamTags" value="true"/>
<property name="allowMissingReturnTag" value="true"/>
</module>
<!-- ##### Requirements for K&R Style braces ##### -->
<!-- Code blocks MUST HAVE braces, even single line statements (if, while, etc) -->
<module name="NeedBraces"/>
<!-- Left braces should be at the end of current line (default value)-->
<module name="LeftCurly"/>
<!-- Right braces should be on start of a new line (default value) -->
<module name="RightCurly"/>
<!-- ##### Indentation / Whitespace requirements ##### -->
<!-- Require 4-space indentation (default value) -->
<module name="Indentation"/>
<!-- Whitespace should exist around all major tokens -->
<module name="WhitespaceAround">
<!-- However, make an exception for empty constructors, methods, types, etc. -->
<property name="allowEmptyConstructors" value="true"/>
<property name="allowEmptyMethods" value="true"/>
<property name="allowEmptyTypes" value="true"/>
<property name="allowEmptyLoops" value="true"/>
</module>
<!-- Validate whitespace around Generics (angle brackets) per typical conventions
http://checkstyle.sourceforge.net/config_whitespace.html#GenericWhitespace -->
<module name="GenericWhitespace"/>
<!-- ##### Requirements for "switch" statements ##### -->
<!-- "switch" statements MUST have a "default" clause -->
<module name="MissingSwitchDefault"/>
<!-- "case" clauses in switch statements MUST include break, return, throw or continue -->
<module name="FallThrough"/>
<!-- ##### Other / Miscellaneous requirements ##### -->
<!-- Require utility classes do not have a public constructor -->
<module name="HideUtilityClassConstructor"/>
<!-- Require each variable declaration is its own statement on its own line -->
<module name="MultipleVariableDeclarations"/>
<!-- Each line of code can only include one statement -->
<module name="OneStatementPerLine"/>
<!-- Require that "catch" statements are not empty (must at least contain a comment) -->
<module name="EmptyCatchBlock"/>
</module>
</module>
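As a rough illustration of what the configuration above enforces, here is a small, hypothetical Java class (all names invented) that should pass these checks: grouped non-wildcard imports, Javadoc on the public class, K&R braces with 4-space indents, braces on single-statement blocks, one statement per line, and a `default` clause in every `switch`.
```java
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang3.StringUtils;

/**
 * Hypothetical example of the enforced style: K&R braces, 4-space indents,
 * no wildcard or unused imports, and Javadoc on the public class.
 */
public class CodeStyleExample {

    /** Utility classes must hide their constructor (HideUtilityClassConstructor). */
    private CodeStyleExample() {
    }

    /**
     * Label each non-blank value with the parity of its position.
     *
     * @param values raw input values, possibly containing blanks
     * @return labelled, non-blank values
     */
    public static List<String> labelNonBlank(List<String> values) {
        List<String> result = new ArrayList<>();
        int position = 0;
        for (String value : values) {
            // Braces are required even for single-statement blocks (NeedBraces)
            if (StringUtils.isBlank(value)) {
                position++;
                continue;
            }
            switch (position % 2) {
                case 0:
                    result.add("even: " + value);
                    break;
                default:
                    // A "default" clause is required (MissingSwitchDefault)
                    result.add("odd: " + value);
                    break;
            }
            position++;
        }
        return result;
    }
}
```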

25
docker-compose-cli.yml Normal file

@@ -0,0 +1,25 @@
version: "3.7"
services:
dspace-cli:
image: "${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-dspace-7_x}"
container_name: dspace-cli
build:
context: .
dockerfile: Dockerfile.cli
#environment:
volumes:
- ./dspace/src/main/docker-compose/local.cfg:/dspace/config/local.cfg
- assetstore:/dspace/assetstore
entrypoint: /dspace/bin/dspace
command: help
networks:
- dspacenet
tty: true
stdin_open: true
volumes:
assetstore:
networks:
dspacenet:

69
docker-compose.yml Normal file

@@ -0,0 +1,69 @@
version: '3.7'
networks:
dspacenet:
services:
dspace:
container_name: dspace
image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-dspace-7_x-test}"
build:
context: .
dockerfile: Dockerfile.test
depends_on:
- dspacedb
networks:
dspacenet:
ports:
- published: 8080
target: 8080
stdin_open: true
tty: true
volumes:
- assetstore:/dspace/assetstore
- ./dspace/src/main/docker-compose/local.cfg:/dspace/config/local.cfg
# Ensure that the database is ready BEFORE starting tomcat
# 1. While a TCP connection to dspacedb port 5432 is not available, continue to sleep
# 2. Then, run database migration to init database tables
# 3. Finally, start Tomcat
entrypoint:
- /bin/bash
- '-c'
- |
while (!</dev/tcp/dspacedb/5432) > /dev/null 2>&1; do sleep 1; done;
/dspace/bin/dspace database migrate
catalina.sh run
dspacedb:
container_name: dspacedb
environment:
PGDATA: /pgdata
image: dspace/dspace-postgres-pgcrypto
networks:
dspacenet:
ports:
- published: 5432
target: 5432
stdin_open: true
tty: true
volumes:
- pgdata:/pgdata
dspacesolr:
container_name: dspacesolr
image: dspace/dspace-solr
networks:
dspacenet:
ports:
- published: 8983
target: 8983
stdin_open: true
tty: true
volumes:
- solr_authority:/opt/solr/server/solr/authority/data
- solr_oai:/opt/solr/server/solr/oai/data
- solr_search:/opt/solr/server/solr/search/data
- solr_statistics:/opt/solr/server/solr/statistics/data
volumes:
assetstore:
pgdata:
solr_authority:
solr_oai:
solr_search:
solr_statistics:

dspace-api/pom.xml

@@ -12,7 +12,7 @@
<parent>
<groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId>
-<version>7.0-SNAPSHOT</version>
+<version>7.0-beta4-SNAPSHOT</version>
<relativePath>..</relativePath>
</parent>
@@ -50,6 +50,33 @@
<configuration>
<debug>true</debug>
<showDeprecation>true</showDeprecation>
<annotationProcessorPaths>
<!-- Enable Hibernate's Metamodel Generator to generate metadata model classes
(ending in _ suffix) for more type-safe Criteria queries -->
<path>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-jpamodelgen</artifactId>
<version>${hibernate.version}</version>
</path>
<!-- Enable JAXB -->
<path>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
<version>${jaxb-api.version}</version>
</path>
<!-- Enable Commons Annotations -->
<path>
<groupId>javax.annotation</groupId>
<artifactId>javax.annotation-api</artifactId>
<version>${javax-annotation.version}</version>
</path>
<!-- Enable http://errorprone.info -->
<path>
<groupId>com.google.errorprone</groupId>
<artifactId>error_prone_core</artifactId>
<version>${errorprone.version}</version>
</path>
</annotationProcessorPaths>
</configuration>
</plugin>
<plugin>
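For context on the `hibernate-jpamodelgen` processor enabled above: it generates a static metamodel companion class (suffixed `_`) for each JPA entity, so Criteria queries can reference attributes without string literals. The sketch below is illustrative only; the `Journal` entity is invented, and the hand-written `Journal_` class stands in for what the processor would generate.
```java
import javax.persistence.Entity;
import javax.persistence.EntityManager;
import javax.persistence.Id;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
import javax.persistence.metamodel.SingularAttribute;
import javax.persistence.metamodel.StaticMetamodel;

/** Hypothetical entity used only to illustrate the generated metamodel. */
@Entity
class Journal {
    @Id
    private Long id;
    private String title;
}

/** Hand-written stand-in for the Journal_ class that hibernate-jpamodelgen would generate. */
@StaticMetamodel(Journal.class)
class Journal_ {
    public static volatile SingularAttribute<Journal, Long> id;
    public static volatile SingularAttribute<Journal, String> title;
}

/** Type-safe Criteria query: Journal_.title is checked at compile time, unlike a bare "title" string. */
class JournalQueries {
    Journal findByTitle(EntityManager em, String title) {
        CriteriaBuilder cb = em.getCriteriaBuilder();
        CriteriaQuery<Journal> query = cb.createQuery(Journal.class);
        Root<Journal> journal = query.from(Journal.class);
        query.select(journal).where(cb.equal(journal.get(Journal_.title), title));
        return em.createQuery(query).getSingleResult();
    }
}
```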
@@ -71,25 +98,11 @@
</execution>
</executions>
</plugin>
<!-- Verify OS license headers for all source code files -->
<plugin>
<groupId>com.mycila</groupId>
<artifactId>license-maven-plugin</artifactId>
<configuration>
<excludes>
<exclude>**/src/test/resources/**</exclude>
<exclude>**/src/test/data/**</exclude>
<exclude>**/.gitignore</exclude>
<exclude>src/test/data/dspaceFolder/config/spiders/**</exclude>
<exclude>src/main/java/org/apache/solr/handler/extraction/ExtractingParams.java</exclude>
</excludes>
</configuration>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
-<version>1.9.1</version>
+<version>3.0.0</version>
<executions>
<execution>
<phase>validate</phase>
@@ -124,7 +137,7 @@
<activation>
<activeByDefault>false</activeByDefault>
<!-- property>
-<name>maven.test.skip</name>
+<name>skipTests</name>
<value>false</value>
</property -->
</activation>
@@ -145,7 +158,7 @@
<activation>
<activeByDefault>false</activeByDefault>
<property>
-<name>maven.test.skip</name>
+<name>skipTests</name>
<value>false</value>
</property>
</activation>
@@ -157,7 +170,6 @@
install of DSpace, against which Tests can be run. -->
<plugin>
<artifactId>maven-dependency-plugin</artifactId>
-<version>2.8</version>
<configuration>
<outputDirectory>${project.build.directory}/testing</outputDirectory>
<artifactItems>
@@ -192,7 +204,7 @@
(see: http://gmaven.codehaus.org/Executing+Groovy+Code )
We are generating a OS-agnostic version (agnostic.build.dir) of
the ${project.build.directory} property (full path of target dir).
-This is needed by the FileWeaver & Surefire plugins (see below)
+This is needed by the Surefire & Failsafe plugins (see below)
to initialize the Unit Test environment's dspace.cfg file.
Otherwise, the Unit Test Framework will not work on Windows OS.
This Groovy code was mostly borrowed from:
@@ -201,18 +213,17 @@
<plugin>
<groupId>org.codehaus.gmaven</groupId>
<artifactId>groovy-maven-plugin</artifactId>
-<version>2.0</version>
<executions>
<execution>
<id>setproperty</id>
-<phase>generate-test-resources</phase> <!-- XXX I think this should be 'initialize' - MHW -->
+<phase>initialize</phase>
<goals>
<goal>execute</goal>
</goals>
<configuration>
<source>
-project.properties['agnostic.build.dir']=project.build.directory.replace(File.separator,'/');
-println("Initializing Maven property 'agnostic.build.dir' to: " + project.properties['agnostic.build.dir']);
+project.properties['agnostic.build.dir'] = project.build.directory.replace(File.separator, '/');
+log.info("Initializing Maven property 'agnostic.build.dir' to: {}", project.properties['agnostic.build.dir']);
</source>
</configuration>
</execution>
@@ -225,63 +236,27 @@
<configuration>
<systemPropertyVariables>
<!-- Specify the dspace.dir to use for test environment -->
+<!-- ${agnostic.build.dir} is set dynamically by groovy-maven-plugin above -->
<!-- This system property is loaded by AbstractDSpaceTest to initialize the test environment -->
<dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir>
<!-- Turn off any DSpace logging -->
<dspace.log.init.disable>true</dspace.log.init.disable>
+<solr.install.dir>${agnostic.build.dir}/testing/dspace/solr/</solr.install.dir>
</systemPropertyVariables>
</configuration>
</plugin>
-<plugin>
-<groupId>org.codehaus.mojo</groupId>
-<artifactId>xml-maven-plugin</artifactId>
-<version>1.0.1</version>
-<executions>
-<execution>
-<id>validate-ALL-xml-and-xsl</id>
-<phase>process-test-resources</phase>
-<goals>
-<goal>validate</goal>
-</goals>
-</execution>
-</executions>
-<configuration>
-<validationSets>
-<!-- validate ALL XML and XSL config files in the testing folder -->
-<validationSet>
-<dir>${agnostic.build.dir}/testing</dir>
-<includes>
-<include>**/*.xml</include>
-<include>**/*.xsl</include>
-<include>**/*.xconf</include>
-</includes>
-</validationSet>
-<!-- validate ALL XML and XSL files throughout the project -->
-<validationSet>
-<dir>${root.basedir}</dir>
-<includes>
-<include>**/*.xml</include>
-<include>**/*.xsl</include>
-<include>**/*.xmap</include>
-</includes>
-</validationSet>
-</validationSets>
-</configuration>
-</plugin>
<!-- Run Integration Testing! This plugin just kicks off the tests (when enabled). -->
<plugin>
<artifactId>maven-failsafe-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<!-- Specify the dspace.dir to use for test environment -->
+<!-- ${agnostic.build.dir} is set dynamically by groovy-maven-plugin above -->
<dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir>
<!-- Turn off any DSpace logging -->
<dspace.log.init.disable>true</dspace.log.init.disable>
+<solr.install.dir>${agnostic.build.dir}/testing/dspace/solr/</solr.install.dir>
</systemPropertyVariables>
</configuration>
</plugin>
@@ -291,30 +266,52 @@
</profile>
</profiles>
<dependencies>
<dependency>
<groupId>org.hibernate</groupId>
-<artifactId>hibernate-core</artifactId>
-<exclusions>
-<exclusion>
-<groupId>org.jboss.logging</groupId>
-<artifactId>jboss-logging</artifactId>
-</exclusion>
-</exclusions>
+<artifactId>hibernate-ehcache</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
-<artifactId>hibernate-ehcache</artifactId>
+<artifactId>hibernate-jpamodelgen</artifactId>
</dependency>
+<dependency>
+<groupId>org.hibernate.validator</groupId>
+<artifactId>hibernate-validator-cdi</artifactId>
+<version>${hibernate-validator.version}</version>
+</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-orm</artifactId>
</dependency>
<dependency>
-<groupId>org.dspace</groupId>
+<groupId>org.glassfish</groupId>
+<artifactId>javax.el</artifactId>
+<version>3.0.1-b10</version>
+</dependency>
+<dependency>
+<groupId>net.handle</groupId>
<artifactId>handle</artifactId>
</dependency>
+<dependency>
+<groupId>net.cnri</groupId>
+<artifactId>cnri-servlet-container</artifactId>
+<exclusions>
+<!-- Newer versions provided in our parent POM -->
+<exclusion>
+<groupId>org.ow2.asm</groupId>
+<artifactId>asm-commons</artifactId>
+</exclusion>
+</exclusions>
+</dependency>
+<!-- Jetty is needed to run Handle Server -->
+<dependency>
+<groupId>org.eclipse.jetty</groupId>
+<artifactId>jetty-server</artifactId>
+</dependency>
<dependency>
<groupId>org.dspace</groupId>
<artifactId>jargon</artifactId>
@@ -323,24 +320,10 @@
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>mets</artifactId> <artifactId>mets</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.dspace.dependencies</groupId>
<artifactId>dspace-tm-extractors</artifactId>
</dependency>
<dependency> <dependency>
<groupId>org.apache.jena</groupId> <groupId>org.apache.jena</groupId>
<artifactId>apache-jena-libs</artifactId> <artifactId>apache-jena-libs</artifactId>
<type>pom</type> <type>pom</type>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>commons-cli</groupId> <groupId>commons-cli</groupId>
@@ -351,8 +334,8 @@
<artifactId>commons-codec</artifactId> <artifactId>commons-codec</artifactId>
</dependency> </dependency>
<dependency> <dependency>
<groupId>commons-collections</groupId> <groupId>org.apache.commons</groupId>
<artifactId>commons-collections</artifactId> <artifactId>commons-collections4</artifactId>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.commons</groupId> <groupId>org.apache.commons</groupId>
@@ -368,8 +351,8 @@
<artifactId>commons-io</artifactId> <artifactId>commons-io</artifactId>
</dependency> </dependency>
<dependency> <dependency>
<groupId>commons-lang</groupId> <groupId>org.apache.commons</groupId>
<artifactId>commons-lang</artifactId> <artifactId>commons-lang3</artifactId>
</dependency> </dependency>
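The commons-collections4 and commons-lang3 swaps above only change Maven coordinates plus Java package names; the matching import changes show up in the Java diffs further down. Illustrative example of the migrated imports:

// Old imports (Commons Collections 3 / Commons Lang 2):
//   import org.apache.commons.collections.CollectionUtils;
//   import org.apache.commons.lang.StringUtils;
// New imports, matching the dependency changes above:
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;

import java.util.Collection;

public class CommonsMigrationExample {
    public static boolean hasContent(Collection<?> values, String label) {
        // Method names are unchanged; only the packages moved.
        return CollectionUtils.isNotEmpty(values) && StringUtils.isNotBlank(label);
    }
}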
<dependency> <dependency>
<groupId>org.apache.commons</groupId> <groupId>org.apache.commons</groupId>
@@ -385,7 +368,7 @@
</dependency> </dependency>
<dependency> <dependency>
<groupId>javax.servlet</groupId> <groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId> <artifactId>javax.servlet-api</artifactId>
<scope>provided</scope> <scope>provided</scope>
</dependency> </dependency>
<dependency> <dependency>
@@ -402,10 +385,6 @@
<groupId>org.jdom</groupId> <groupId>org.jdom</groupId>
<artifactId>jdom</artifactId> <artifactId>jdom</artifactId>
</dependency> </dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
<dependency> <dependency>
<groupId>oro</groupId> <groupId>oro</groupId>
<artifactId>oro</artifactId> <artifactId>oro</artifactId>
@@ -415,20 +394,8 @@
<artifactId>pdfbox</artifactId> <artifactId>pdfbox</artifactId>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.pdfbox</groupId> <groupId>org.apache.pdfbox</groupId>
<artifactId>fontbox</artifactId> <artifactId>fontbox</artifactId>
</dependency>
<dependency>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15</artifactId>
</dependency>
<dependency>
<groupId>org.bouncycastle</groupId>
<artifactId>bcmail-jdk15</artifactId>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.poi</groupId> <groupId>org.apache.poi</groupId>
@@ -449,12 +416,6 @@
<dependency> <dependency>
<groupId>xerces</groupId> <groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId> <artifactId>xercesImpl</artifactId>
<exclusions>
<exclusion>
<groupId>xml-apis</groupId>
<artifactId>xml-apis</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>xml-apis</groupId> <groupId>xml-apis</groupId>
@@ -476,11 +437,6 @@
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-services</artifactId> <artifactId>dspace-services</artifactId>
</dependency> </dependency>
<dependency> <!-- Keep jmockit before junit -->
<groupId>org.jmockit</groupId>
<artifactId>jmockit</artifactId>
<scope>test</scope>
</dependency>
<dependency> <dependency>
<groupId>junit</groupId> <groupId>junit</groupId>
<artifactId>junit</artifactId> <artifactId>junit</artifactId>
@@ -497,13 +453,13 @@
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.databene</groupId> <groupId>org.mockito</groupId>
<artifactId>contiperf</artifactId> <artifactId>mockito-core</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.mockito</groupId> <groupId>org.springframework</groupId>
<artifactId>mockito-core</artifactId> <artifactId>spring-test</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>
@@ -511,67 +467,187 @@
<artifactId>rome-modules</artifactId> <artifactId>rome-modules</artifactId>
<version>1.0</version> <version>1.0</version>
</dependency> </dependency>
<dependency>
<groupId>gr.ekt.bte</groupId>
<artifactId>bte-core</artifactId>
<version>0.9.3.5</version>
<exclusions>
<!-- A more recent version is retrieved from another dependency -->
<exclusion>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency> <dependency>
<groupId>gr.ekt.bte</groupId> <groupId>gr.ekt.bte</groupId>
<artifactId>bte-io</artifactId> <artifactId>bte-io</artifactId>
<version>0.9.3.5</version> <version>0.9.3.5</version>
<exclusions> <exclusions>
<!-- A more recent version is retrieved from another dependency -->
<exclusion> <exclusion>
<groupId>org.apache.commons</groupId> <groupId>net.bytebuddy</groupId>
<artifactId>commons-lang3</artifactId> <artifactId>byte-buddy</artifactId>
</exclusion>
<!-- A more recent version is retrieved from another dependency -->
<exclusion>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
</exclusion> </exclusion>
</exclusions> </exclusions>
</dependency> </dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
</dependency>
<dependency> <dependency>
<groupId>org.apache.httpcomponents</groupId> <groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId> <artifactId>httpclient</artifactId>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.solr</groupId> <groupId>org.apache.solr</groupId>
<artifactId>solr-solrj</artifactId> <artifactId>solr-solrj</artifactId>
<version>${solr.version}</version> <version>${solr.client.version}</version>
</dependency>
<!-- Solr Core is needed for Integration Tests (to run a MockSolrServer) -->
<!-- The following Solr / Lucene dependencies also support integration tests -->
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-core</artifactId>
<scope>test</scope>
<version>${solr.client.version}</version>
<exclusions> <exclusions>
<exclusion> <exclusion>
<groupId>org.slf4j</groupId> <groupId>commons-cli</groupId>
<artifactId>jcl-over-slf4j</artifactId> <artifactId>commons-cli</artifactId>
</exclusion> </exclusion>
<exclusion> <exclusion>
<groupId>org.slf4j</groupId> <groupId>org.eclipse.jetty</groupId>
<artifactId>slf4j-api</artifactId> <artifactId>jetty-continuation</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-deploy</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-io</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-jmx</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-rewrite</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-security</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-server</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlet</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlets</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-webapp</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-xml</artifactId>
</exclusion> </exclusion>
</exclusions> </exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>commons-configuration</groupId> <groupId>org.apache.solr</groupId>
<artifactId>commons-configuration</artifactId> <artifactId>solr-cell</artifactId>
<exclusions>
<!-- Newer versions provided in our parent POM -->
<exclusion>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
</exclusion>
<exclusion>
<groupId>org.ow2.asm</groupId>
<artifactId>asm-commons</artifactId>
</exclusion>
<exclusion>
<groupId>org.bouncycastle</groupId>
<artifactId>bcpkix-jdk15on</artifactId>
</exclusion>
<exclusion>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15on</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-xml</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlet</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-webapp</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-deploy</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-continuation</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlets</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-io</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-security</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>com.maxmind.geoip</groupId> <groupId>org.apache.lucene</groupId>
<artifactId>geoip-api</artifactId> <artifactId>lucene-core</artifactId>
<version>1.3.0</version> </dependency>
<!-- Reminder: Keep icu4j (in Parent POM) synced with version used by lucene-analyzers-icu below,
otherwise ICUFoldingFilterFactory may throw errors in tests. -->
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-icu</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-smartcn</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-stempel</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.xmlbeans</groupId>
<artifactId>xmlbeans</artifactId>
<version>2.6.0</version>
</dependency>
<dependency>
<groupId>com.maxmind.geoip2</groupId>
<artifactId>geoip2</artifactId>
<version>2.11.0</version>
</dependency> </dependency>
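The legacy geoip-api artifact is replaced here by MaxMind's geoip2 library, which has a different lookup API. A hedged sketch of a GeoIP2 city lookup (the database path and IP address are placeholders, not values from this change set):

import com.maxmind.geoip2.DatabaseReader;
import com.maxmind.geoip2.model.CityResponse;

import java.io.File;
import java.net.InetAddress;

public class GeoIp2Example {
    public static void main(String[] args) throws Exception {
        // Placeholder path to a GeoLite2/GeoIP2 city database file
        DatabaseReader reader = new DatabaseReader.Builder(new File("GeoLite2-City.mmdb")).build();
        CityResponse response = reader.city(InetAddress.getByName("128.101.101.101"));
        System.out.println(response.getCountry().getIsoCode() + " / " + response.getCity().getName());
        reader.close();
    }
}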
<dependency> <dependency>
<groupId>org.apache.ant</groupId> <groupId>org.apache.ant</groupId>
@@ -583,12 +659,6 @@
<version>2.1.7</version> <version>2.1.7</version>
</dependency> </dependency>
<dependency>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
<version>1.4.0</version>
</dependency>
<dependency> <dependency>
<groupId>com.coverity.security</groupId> <groupId>com.coverity.security</groupId>
<artifactId>coverity-escapers</artifactId> <artifactId>coverity-escapers</artifactId>
@@ -605,7 +675,6 @@
<dependency> <dependency>
<groupId>com.google.guava</groupId> <groupId>com.google.guava</groupId>
<artifactId>guava</artifactId> <artifactId>guava</artifactId>
<version>19.0</version>
</dependency> </dependency>
@@ -614,11 +683,6 @@
<artifactId>postgresql</artifactId> <artifactId>postgresql</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency> <dependency>
<groupId>jdbm</groupId> <groupId>jdbm</groupId>
<artifactId>jdbm</artifactId> <artifactId>jdbm</artifactId>
@@ -672,7 +736,6 @@
<dependency> <dependency>
<groupId>joda-time</groupId> <groupId>joda-time</groupId>
<artifactId>joda-time</artifactId> <artifactId>joda-time</artifactId>
<version>2.9.2</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>javax.inject</groupId> <groupId>javax.inject</groupId>
@@ -681,56 +744,102 @@
<type>jar</type> <type>jar</type>
</dependency> </dependency>
<!-- JAXB API and implementation (no longer bundled as of Java 11) -->
<dependency>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jaxb</groupId>
<artifactId>jaxb-runtime</artifactId>
</dependency>
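The jaxb-api and jaxb-runtime dependencies above restore javax.xml.bind on Java 11+, where JAXB was removed from the JDK. A minimal sketch of the kind of code that needs them (the Record class is a made-up example, not DSpace code):

import javax.xml.bind.JAXBContext;
import javax.xml.bind.Marshaller;
import javax.xml.bind.annotation.XmlRootElement;

import java.io.StringWriter;

public class JaxbExample {

    @XmlRootElement
    public static class Record {
        public String title = "example";
    }

    public static void main(String[] args) throws Exception {
        Marshaller marshaller = JAXBContext.newInstance(Record.class).createMarshaller();
        marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
        StringWriter out = new StringWriter();
        // Fails at runtime on Java 11+ unless jaxb-api/jaxb-runtime are on the classpath
        marshaller.marshal(new Record(), out);
        System.out.println(out);
    }
}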
<!-- Apache Axiom -->
<dependency> <dependency>
<groupId>org.apache.ws.commons.axiom</groupId> <groupId>org.apache.ws.commons.axiom</groupId>
<artifactId>axiom-impl</artifactId> <artifactId>axiom-impl</artifactId>
<!-- NOTE: SWORDv2 needs 1.2.14, required by Abdera: https://abdera.apache.org/ --> <version>${axiom.version}</version>
<version>1.2.14</version> <exclusions>
<!-- Exclude Geronimo as it is NOT necessary when using javax.activation (which we use)
See: https://ws.apache.org/axiom/userguide/ch04.html#d0e732 -->
<exclusion>
<groupId>org.apache.geronimo.specs</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.ws.commons.axiom</groupId> <groupId>org.apache.ws.commons.axiom</groupId>
<artifactId>axiom-api</artifactId> <artifactId>axiom-api</artifactId>
<!-- NOTE: SWORDv2 needs 1.2.14, required by Abdera: https://abdera.apache.org/ --> <version>${axiom.version}</version>
<version>1.2.14</version> <exclusions>
<!-- Exclude Geronimo as it is NOT necessary when using javax.activation (which we use)
See: https://ws.apache.org/axiom/userguide/ch04.html#d0e732 -->
<exclusion>
<groupId>org.apache.geronimo.specs</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.glassfish.jersey.core</groupId> <groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-client</artifactId> <artifactId>jersey-client</artifactId>
<version>2.22.1</version> <version>${jersey.version}</version>
</dependency> </dependency>
<!-- S3 --> <!-- S3 -->
<dependency> <dependency>
<groupId>com.amazonaws</groupId> <groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-s3</artifactId> <artifactId>aws-java-sdk-s3</artifactId>
<version>1.10.50</version> <version>1.10.50</version>
<exclusions>
<exclusion>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<!-- S3 also wanted jackson... -->
<!-- For ORCID v2 integration -->
<dependency> <dependency>
<groupId>com.fasterxml.jackson.core</groupId> <groupId>org.dspace</groupId>
<artifactId>jackson-core</artifactId> <artifactId>orcid-jaxb-api</artifactId>
<version>2.7.0</version> <version>2.1.0</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>com.fasterxml.jackson.core</groupId> <groupId>org.json</groupId>
<artifactId>jackson-databind</artifactId> <artifactId>json</artifactId>
<version>2.7.0</version> <version>20180130</version>
</dependency> </dependency>
<!-- Used for Solr core export/import -->
<dependency> <dependency>
<groupId>com.fasterxml.jackson.core</groupId> <groupId>com.opencsv</groupId>
<artifactId>jackson-annotations</artifactId> <artifactId>opencsv</artifactId>
<version>2.7.0</version> <version>4.5</version>
</dependency> </dependency>
<!-- Email templating -->
<dependency>
<groupId>org.apache.velocity</groupId>
<artifactId>velocity-engine-core</artifactId>
<version>2.0</version>
<type>jar</type>
</dependency>
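Velocity 2.0 is pulled in above for email templating. A hedged illustration of evaluating a template string with the Velocity 2.x API (the template text and variable name are invented for the example):

import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;

import java.io.StringWriter;

public class VelocityExample {
    public static void main(String[] args) {
        VelocityEngine engine = new VelocityEngine();
        engine.init();

        VelocityContext context = new VelocityContext();
        context.put("recipient", "Jane");

        StringWriter out = new StringWriter();
        // Renders "$recipient" from the context into the output writer
        engine.evaluate(context, out, "email-example", "Dear $recipient,\nYour submission was approved.");
        System.out.println(out);
    }
}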
<dependency>
<groupId>org.xmlunit</groupId>
<artifactId>xmlunit-core</artifactId>
<version>2.6.3</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hibernate.javax.persistence</groupId>
<artifactId>hibernate-jpa-2.1-api</artifactId>
<version>1.0.0.Final</version>
</dependency>
<dependency>
<groupId>org.apache.bcel</groupId>
<artifactId>bcel</artifactId>
<version>6.4.0</version>
</dependency>
</dependencies> </dependencies>
</project> </project>

View File

@@ -1,165 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler.extraction;
/**
* The various Solr Parameters names to use when extracting content.
*
**/
public interface ExtractingParams {
/**
* Map all generated attribute names to field names with lowercase and underscores.
*/
public static final String LOWERNAMES = "lowernames";
/**
* if true, ignore TikaException (give up to extract text but index meta data)
*/
public static final String IGNORE_TIKA_EXCEPTION = "ignoreTikaException";
/**
* The param prefix for mapping Tika metadata to Solr fields.
* <p>
* To map a field, add a name like:
* <pre>fmap.title=solr.title</pre>
*
* In this example, the tika "title" metadata value will be added to a Solr field named "solr.title"
*
*
*/
public static final String MAP_PREFIX = "fmap.";
/**
* The boost value for the name of the field. The boost can be specified by a name mapping.
* <p>
* For example
* <pre>
* map.title=solr.title
* boost.solr.title=2.5
* </pre>
* will boost the solr.title field for this document by 2.5
*
*/
public static final String BOOST_PREFIX = "boost.";
/**
* Pass in literal values to be added to the document, as in
* <pre>
* literal.myField=Foo
* </pre>
*
*/
public static final String LITERALS_PREFIX = "literal.";
/**
* Restrict the extracted parts of a document to be indexed
* by passing in an XPath expression. All content that satisfies the XPath expr.
* will be passed to the {@link org.apache.solr.handler.extraction.SolrContentHandler}.
* <p>
* See Tika's docs for what the extracted document looks like.
*
* @see #CAPTURE_ELEMENTS
*/
public static final String XPATH_EXPRESSION = "xpath";
/**
* Only extract and return the content, do not index it.
*/
public static final String EXTRACT_ONLY = "extractOnly";
/**
* Content output format if extractOnly is true. Default is "xml", alternative is "text".
*/
public static final String EXTRACT_FORMAT = "extractFormat";
/**
* Capture attributes separately according to the name of the element, instead of just adding them to the string buffer
*/
public static final String CAPTURE_ATTRIBUTES = "captureAttr";
/**
* Literal field values will by default override other values such as metadata and content. Set this to false to revert to pre-4.0 behaviour
*/
public static final String LITERALS_OVERRIDE = "literalsOverride";
/**
* Capture the specified fields (and everything included below it that isn't capture by some other capture field) separately from the default. This is different
* then the case of passing in an XPath expression.
* <p>
* The Capture field is based on the localName returned to the {@link org.apache.solr.handler.extraction.SolrContentHandler}
* by Tika, not to be confused by the mapped field. The field name can then
* be mapped into the index schema.
* <p>
* For instance, a Tika document may look like:
* <pre>
* &lt;html&gt;
* ...
* &lt;body&gt;
* &lt;p&gt;some text here. &lt;div&gt;more text&lt;/div&gt;&lt;/p&gt;
* Some more text
* &lt;/body&gt;
* </pre>
 * By passing in the p tag, you could capture all P tags separately from the rest of the text.
* Thus, in the example, the capture of the P tag would be: "some text here. more text"
*
*/
public static final String CAPTURE_ELEMENTS = "capture";
/**
* The type of the stream. If not specified, Tika will use mime type detection.
*/
public static final String STREAM_TYPE = "stream.type";
/**
* Optional. The file name. If specified, Tika can take this into account while
* guessing the MIME type.
*/
public static final String RESOURCE_NAME = "resource.name";
/**
* Optional. The password for this resource. Will be used instead of the rule based password lookup mechanisms
*/
public static final String RESOURCE_PASSWORD = "resource.password";
/**
* Optional. If specified, the prefix will be prepended to all Metadata, such that it would be possible
* to setup a dynamic field to automatically capture it
*/
public static final String UNKNOWN_FIELD_PREFIX = "uprefix";
/**
* Optional. If specified and the name of a potential field cannot be determined, the default Field specified
* will be used instead.
*/
public static final String DEFAULT_FIELD = "defaultField";
/**
* Optional. If specified, loads the file as a source for password lookups for Tika encrypted documents.
* <p>
* File format is Java properties format with one key=value per line.
* The key is evaluated as a regex against the file name, and the value is the password
* The rules are evaluated top-bottom, i.e. the first match will be used
* If you want a fallback password to be always used, supply a .*=&lt;defaultmypassword&gt; at the end
*/
public static final String PASSWORD_MAP_FILE = "passwordsFile";
}
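For reference, the parameter names documented in this removed interface are the ones Solr's extracting request handler accepts. A hedged SolrJ example of how such parameters are typically passed (the core URL, file name, and field names are assumptions, not taken from DSpace):

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;

import java.io.File;

public class ExtractRequestExample {
    public static void main(String[] args) throws Exception {
        SolrClient solr = new HttpSolrClient.Builder("http://localhost:8983/solr/example").build();

        ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update/extract");
        req.addFile(new File("document.pdf"), "application/pdf");
        req.setParam("literal.id", "doc-1");       // LITERALS_PREFIX: literal field value
        req.setParam("fmap.content", "fulltext");  // MAP_PREFIX: map Tika metadata to a Solr field
        req.setParam("lowernames", "true");        // LOWERNAMES
        req.setParam("extractOnly", "false");      // EXTRACT_ONLY

        solr.request(req);
        solr.commit();
        solr.close();
    }
}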

View File

@@ -17,7 +17,7 @@ import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser; import org.apache.commons.cli.PosixParser;
import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Community; import org.dspace.content.Community;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
@@ -35,8 +35,7 @@ import org.dspace.handle.service.HandleService;
* @version $Revision$ * @version $Revision$
*/ */
public class CommunityFiliator public class CommunityFiliator {
{
protected CommunityService communityService; protected CommunityService communityService;
protected HandleService handleService; protected HandleService handleService;
@@ -47,12 +46,10 @@ public class CommunityFiliator
} }
/** /**
*
* @param argv the command line arguments given * @param argv the command line arguments given
* @throws Exception if error * @throws Exception if error
*/ */
public static void main(String[] argv) throws Exception public static void main(String[] argv) throws Exception {
{
// create an options object and populate it // create an options object and populate it
CommandLineParser parser = new PosixParser(); CommandLineParser parser = new PosixParser();
@@ -60,11 +57,11 @@ public class CommunityFiliator
options.addOption("s", "set", false, "set a parent/child relationship"); options.addOption("s", "set", false, "set a parent/child relationship");
options.addOption("r", "remove", false, options.addOption("r", "remove", false,
"remove a parent/child relationship"); "remove a parent/child relationship");
options.addOption("p", "parent", true, options.addOption("p", "parent", true,
"parent community (handle or database ID)"); "parent community (handle or database ID)");
options.addOption("c", "child", true, options.addOption("c", "child", true,
"child community (handle or databaseID)"); "child community (handle or databaseID)");
options.addOption("h", "help", false, "help"); options.addOption("h", "help", false, "help");
CommandLine line = parser.parse(options, argv); CommandLine line = parser.parse(options, argv);
@@ -73,57 +70,48 @@ public class CommunityFiliator
String parentID = null; String parentID = null;
String childID = null; String childID = null;
if (line.hasOption('h')) if (line.hasOption('h')) {
{
HelpFormatter myhelp = new HelpFormatter(); HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("CommunityFiliator\n", options); myhelp.printHelp("CommunityFiliator\n", options);
System.out System.out
.println("\nestablish a relationship: CommunityFiliator -s -p parentID -c childID"); .println("\nestablish a relationship: CommunityFiliator -s -p parentID -c childID");
System.out System.out
.println("remove a relationship: CommunityFiliator -r -p parentID -c childID"); .println("remove a relationship: CommunityFiliator -r -p parentID -c childID");
System.exit(0); System.exit(0);
} }
if (line.hasOption('s')) if (line.hasOption('s')) {
{
command = "set"; command = "set";
} }
if (line.hasOption('r')) if (line.hasOption('r')) {
{
command = "remove"; command = "remove";
} }
if (line.hasOption('p')) // parent if (line.hasOption('p')) { // parent
{
parentID = line.getOptionValue('p'); parentID = line.getOptionValue('p');
} }
if (line.hasOption('c')) // child if (line.hasOption('c')) { // child
{
childID = line.getOptionValue('c'); childID = line.getOptionValue('c');
} }
// now validate // now validate
// must have a command set // must have a command set
if (command == null) if (command == null) {
{
System.out System.out
.println("Error - must run with either set or remove (run with -h flag for details)"); .println("Error - must run with either set or remove (run with -h flag for details)");
System.exit(1); System.exit(1);
} }
if ("set".equals(command) || "remove".equals(command)) if ("set".equals(command) || "remove".equals(command)) {
{ if (parentID == null) {
if (parentID == null)
{
System.out.println("Error - a parentID must be specified (run with -h flag for details)"); System.out.println("Error - a parentID must be specified (run with -h flag for details)");
System.exit(1); System.exit(1);
} }
if (childID == null) if (childID == null) {
{
System.out.println("Error - a childID must be specified (run with -h flag for details)"); System.out.println("Error - a childID must be specified (run with -h flag for details)");
System.exit(1); System.exit(1);
} }
@@ -135,86 +123,66 @@ public class CommunityFiliator
// we are superuser! // we are superuser!
c.turnOffAuthorisationSystem(); c.turnOffAuthorisationSystem();
try try {
{
// validate and resolve the parent and child IDs into commmunities // validate and resolve the parent and child IDs into commmunities
Community parent = filiator.resolveCommunity(c, parentID); Community parent = filiator.resolveCommunity(c, parentID);
Community child = filiator.resolveCommunity(c, childID); Community child = filiator.resolveCommunity(c, childID);
if (parent == null) if (parent == null) {
{
System.out.println("Error, parent community cannot be found: " System.out.println("Error, parent community cannot be found: "
+ parentID); + parentID);
System.exit(1); System.exit(1);
} }
if (child == null) if (child == null) {
{
System.out.println("Error, child community cannot be found: " System.out.println("Error, child community cannot be found: "
+ childID); + childID);
System.exit(1); System.exit(1);
} }
if ("set".equals(command)) if ("set".equals(command)) {
{
filiator.filiate(c, parent, child); filiator.filiate(c, parent, child);
} } else {
else
{
filiator.defiliate(c, parent, child); filiator.defiliate(c, parent, child);
} }
} } catch (SQLException sqlE) {
catch (SQLException sqlE)
{
System.out.println("Error - SQL exception: " + sqlE.toString()); System.out.println("Error - SQL exception: " + sqlE.toString());
} } catch (AuthorizeException authE) {
catch (AuthorizeException authE)
{
System.out.println("Error - Authorize exception: " System.out.println("Error - Authorize exception: "
+ authE.toString()); + authE.toString());
} } catch (IOException ioE) {
catch (IOException ioE)
{
System.out.println("Error - IO exception: " + ioE.toString()); System.out.println("Error - IO exception: " + ioE.toString());
} }
} }
/** /**
* * @param c context
* @param c context
* @param parent parent Community * @param parent parent Community
* @param child child community * @param child child community
* @throws SQLException if database error * @throws SQLException if database error
* @throws AuthorizeException if authorize error * @throws AuthorizeException if authorize error
* @throws IOException if IO error * @throws IOException if IO error
*/ */
public void filiate(Context c, Community parent, Community child) public void filiate(Context c, Community parent, Community child)
throws SQLException, AuthorizeException, IOException throws SQLException, AuthorizeException, IOException {
{
// check that a valid filiation would be established // check that a valid filiation would be established
// first test - proposed child must currently be an orphan (i.e. // first test - proposed child must currently be an orphan (i.e.
// top-level) // top-level)
Community childDad = CollectionUtils.isNotEmpty(child.getParentCommunities()) ? child.getParentCommunities().iterator().next() : null; Community childDad = CollectionUtils.isNotEmpty(child.getParentCommunities()) ? child.getParentCommunities()
.iterator().next() : null;
if (childDad != null) if (childDad != null) {
{
System.out.println("Error, child community: " + child.getID() System.out.println("Error, child community: " + child.getID()
+ " already a child of: " + childDad.getID()); + " already a child of: " + childDad.getID());
System.exit(1); System.exit(1);
} }
// second test - circularity: parent's parents can't include proposed // second test - circularity: parent's parents can't include proposed
// child // child
List<Community> parentDads = parent.getParentCommunities(); List<Community> parentDads = parent.getParentCommunities();
if (parentDads.contains(child)) {
for (int i = 0; i < parentDads.size(); i++) System.out.println("Error, circular parentage - child is parent of parent");
{ System.exit(1);
if (parentDads.get(i).getID().equals(child.getID()))
{
System.out
.println("Error, circular parentage - child is parent of parent");
System.exit(1);
}
} }
// everthing's OK // everthing's OK
@@ -223,83 +191,63 @@ public class CommunityFiliator
// complete the pending transaction // complete the pending transaction
c.complete(); c.complete();
System.out.println("Filiation complete. Community: '" + parent.getID() System.out.println("Filiation complete. Community: '" + parent.getID()
+ "' is parent of community: '" + child.getID() + "'"); + "' is parent of community: '" + child.getID() + "'");
} }
/** /**
* * @param c context
* @param c context
* @param parent parent Community * @param parent parent Community
* @param child child community * @param child child community
* @throws SQLException if database error * @throws SQLException if database error
* @throws AuthorizeException if authorize error * @throws AuthorizeException if authorize error
* @throws IOException if IO error * @throws IOException if IO error
*/ */
public void defiliate(Context c, Community parent, Community child) public void defiliate(Context c, Community parent, Community child)
throws SQLException, AuthorizeException, IOException throws SQLException, AuthorizeException, IOException {
{
// verify that child is indeed a child of parent // verify that child is indeed a child of parent
List<Community> parentKids = parent.getSubcommunities(); List<Community> parentKids = parent.getSubcommunities();
boolean isChild = false; if (!parentKids.contains(child)) {
System.out.println("Error, child community not a child of parent community");
for (int i = 0; i < parentKids.size(); i++)
{
if (parentKids.get(i).getID().equals(child.getID()))
{
isChild = true;
break;
}
}
if (!isChild)
{
System.out
.println("Error, child community not a child of parent community");
System.exit(1); System.exit(1);
} }
// OK remove the mappings - but leave the community, which will become // OK remove the mappings - but leave the community, which will become
// top-level // top-level
child.getParentCommunities().remove(parent); child.removeParentCommunity(parent);
parent.getSubcommunities().remove(child); parent.removeSubCommunity(child);
communityService.update(c, child); communityService.update(c, child);
communityService.update(c, parent); communityService.update(c, parent);
// complete the pending transaction // complete the pending transaction
c.complete(); c.complete();
System.out.println("Defiliation complete. Community: '" + child.getID() System.out.println("Defiliation complete. Community: '" + child.getID()
+ "' is no longer a child of community: '" + parent.getID() + "' is no longer a child of community: '" + parent.getID()
+ "'"); + "'");
} }
/** /**
* Find a community by ID * Find a community by ID
* @param c context *
* @param c context
* @param communityID community ID * @param communityID community ID
* @return Community object * @return Community object
* @throws SQLException if database error * @throws SQLException if database error
*/ */
protected Community resolveCommunity(Context c, String communityID) protected Community resolveCommunity(Context c, String communityID)
throws SQLException throws SQLException {
{
Community community = null; Community community = null;
if (communityID.indexOf('/') != -1) if (communityID.indexOf('/') != -1) {
{
// has a / must be a handle // has a / must be a handle
community = (Community) handleService.resolveToObject(c, community = (Community) handleService.resolveToObject(c,
communityID); communityID);
// ensure it's a community // ensure it's a community
if ((community == null) if ((community == null)
|| (community.getType() != Constants.COMMUNITY)) || (community.getType() != Constants.COMMUNITY)) {
{
community = null; community = null;
} }
} } else {
else
{
community = communityService.find(c, UUID.fromString(communityID)); community = communityService.find(c, UUID.fromString(communityID));
} }

View File

@@ -15,8 +15,7 @@ import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser; import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang.StringUtils;
import org.dspace.core.ConfigurationManager; import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.I18nUtil; import org.dspace.core.I18nUtil;
@@ -42,12 +41,12 @@ import org.dspace.eperson.service.GroupService;
* *
* @author Robert Tansley * @author Robert Tansley
* @author Richard Jones * @author Richard Jones
*
* @version $Revision$ * @version $Revision$
*/ */
public final class CreateAdministrator public final class CreateAdministrator {
{ /**
/** DSpace Context object */ * DSpace Context object
*/
private final Context context; private final Context context;
protected EPersonService ePersonService; protected EPersonService ePersonService;
@@ -61,8 +60,7 @@ public final class CreateAdministrator
* @throws Exception if error * @throws Exception if error
*/ */
public static void main(String[] argv) public static void main(String[] argv)
throws Exception throws Exception {
{
CommandLineParser parser = new PosixParser(); CommandLineParser parser = new PosixParser();
Options options = new Options(); Options options = new Options();
@@ -77,14 +75,11 @@ public final class CreateAdministrator
CommandLine line = parser.parse(options, argv); CommandLine line = parser.parse(options, argv);
if (line.hasOption("e") && line.hasOption("f") && line.hasOption("l") && if (line.hasOption("e") && line.hasOption("f") && line.hasOption("l") &&
line.hasOption("c") && line.hasOption("p")) line.hasOption("c") && line.hasOption("p")) {
{
ca.createAdministrator(line.getOptionValue("e"), ca.createAdministrator(line.getOptionValue("e"),
line.getOptionValue("f"), line.getOptionValue("l"), line.getOptionValue("f"), line.getOptionValue("l"),
line.getOptionValue("c"), line.getOptionValue("p")); line.getOptionValue("c"), line.getOptionValue("p"));
} } else {
else
{
ca.negotiateAdministratorDetails(); ca.negotiateAdministratorDetails();
} }
} }
@@ -95,8 +90,7 @@ public final class CreateAdministrator
* @throws Exception if error * @throws Exception if error
*/ */
protected CreateAdministrator() protected CreateAdministrator()
throws Exception throws Exception {
{
context = new Context(); context = new Context();
groupService = EPersonServiceFactory.getInstance().getGroupService(); groupService = EPersonServiceFactory.getInstance().getGroupService();
ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
@@ -109,8 +103,7 @@ public final class CreateAdministrator
* @throws Exception if error * @throws Exception if error
*/ */
protected void negotiateAdministratorDetails() protected void negotiateAdministratorDetails()
throws Exception throws Exception {
{
Console console = System.console(); Console console = System.console();
System.out.println("Creating an initial administrator account"); System.out.println("Creating an initial administrator account");
@@ -122,20 +115,16 @@ public final class CreateAdministrator
String lastName = null; String lastName = null;
char[] password1 = null; char[] password1 = null;
char[] password2 = null; char[] password2 = null;
String language = I18nUtil.DEFAULTLOCALE.getLanguage(); String language = I18nUtil.getDefaultLocale().getLanguage();
while (!dataOK) while (!dataOK) {
{
System.out.print("E-mail address: "); System.out.print("E-mail address: ");
System.out.flush(); System.out.flush();
email = console.readLine(); email = console.readLine();
if (!StringUtils.isBlank(email)) if (!StringUtils.isBlank(email)) {
{
email = email.trim(); email = email.trim();
} } else {
else
{
System.out.println("Please provide an email address."); System.out.println("Please provide an email address.");
continue; continue;
} }
@@ -145,8 +134,7 @@ public final class CreateAdministrator
firstName = console.readLine(); firstName = console.readLine();
if (firstName != null) if (firstName != null) {
{
firstName = firstName.trim(); firstName = firstName.trim();
} }
@@ -155,21 +143,19 @@ public final class CreateAdministrator
lastName = console.readLine(); lastName = console.readLine();
if (lastName != null) if (lastName != null) {
{
lastName = lastName.trim(); lastName = lastName.trim();
} }
if (ConfigurationManager.getProperty("webui.supported.locales") != null) if (ConfigurationManager.getProperty("webui.supported.locales") != null) {
{ System.out.println("Select one of the following languages: " + ConfigurationManager
System.out.println("Select one of the following languages: " + ConfigurationManager.getProperty("webui.supported.locales")); .getProperty("webui.supported.locales"));
System.out.print("Language: "); System.out.print("Language: ");
System.out.flush(); System.out.flush();
language = console.readLine(); language = console.readLine();
if (language != null) if (language != null) {
{
language = language.trim(); language = language.trim();
language = I18nUtil.getSupportedLocale(new Locale(language)).getLanguage(); language = I18nUtil.getSupportedLocale(new Locale(language)).getLanguage();
} }
@@ -187,25 +173,20 @@ public final class CreateAdministrator
password2 = console.readPassword(); password2 = console.readPassword();
//TODO real password validation //TODO real password validation
if (password1.length > 1 && Arrays.equals(password1, password2)) if (password1.length > 1 && Arrays.equals(password1, password2)) {
{
// password OK // password OK
System.out.print("Is the above data correct? (y or n): "); System.out.print("Is the above data correct? (y or n): ");
System.out.flush(); System.out.flush();
String s = console.readLine(); String s = console.readLine();
if (s != null) if (s != null) {
{
s = s.trim(); s = s.trim();
if (s.toLowerCase().startsWith("y")) if (s.toLowerCase().startsWith("y")) {
{
dataOK = true; dataOK = true;
} }
} }
} } else {
else
{
System.out.println("Passwords don't match"); System.out.println("Passwords don't match");
} }
} }
@@ -224,16 +205,14 @@ public final class CreateAdministrator
* *
* @param email the email for the user * @param email the email for the user
* @param first user's first name * @param first user's first name
* @param last user's last name * @param last user's last name
* @param language preferred language * @param language preferred language
* @param pw desired password * @param pw desired password
*
* @throws Exception if error * @throws Exception if error
*/ */
protected void createAdministrator(String email, String first, String last, protected void createAdministrator(String email, String first, String last,
String language, String pw) String language, String pw)
throws Exception throws Exception {
{
// Of course we aren't an administrator yet so we need to // Of course we aren't an administrator yet so we need to
// circumvent authorisation // circumvent authorisation
context.turnOffAuthorisationSystem(); context.turnOffAuthorisationSystem();
@@ -241,18 +220,16 @@ public final class CreateAdministrator
// Find administrator group // Find administrator group
Group admins = groupService.findByName(context, Group.ADMIN); Group admins = groupService.findByName(context, Group.ADMIN);
if (admins == null) if (admins == null) {
{
throw new IllegalStateException("Error, no admin group (group 1) found"); throw new IllegalStateException("Error, no admin group (group 1) found");
} }
// Create the administrator e-person // Create the administrator e-person
EPerson eperson = ePersonService.findByEmail(context,email); EPerson eperson = ePersonService.findByEmail(context, email);
// check if the email belongs to a registered user, // check if the email belongs to a registered user,
// if not create a new user with this email // if not create a new user with this email
if (eperson == null) if (eperson == null) {
{
eperson = ePersonService.create(context); eperson = ePersonService.create(context);
eperson.setEmail(email); eperson.setEmail(email);
eperson.setCanLogIn(true); eperson.setCanLogIn(true);

View File

@@ -7,7 +7,19 @@
*/ */
package org.dspace.administer; package org.dspace.administer;
import org.apache.commons.cli.*; import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.xml.serialize.Method; import org.apache.xml.serialize.Method;
import org.apache.xml.serialize.OutputFormat; import org.apache.xml.serialize.OutputFormat;
import org.apache.xml.serialize.XMLSerializer; import org.apache.xml.serialize.XMLSerializer;
@@ -19,14 +31,6 @@ import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.xml.sax.SAXException; import org.xml.sax.SAXException;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/** /**
* @author Graham Triggs * @author Graham Triggs
@@ -37,51 +41,53 @@ import java.util.Map;
* The form of the XML is as follows * The form of the XML is as follows
* {@code * {@code
* <metadata-schemas> * <metadata-schemas>
* <schema> * <schema>
* <name>dc</name> * <name>dc</name>
* <namespace>http://dublincore.org/documents/dcmi-terms/</namespace> * <namespace>http://dublincore.org/documents/dcmi-terms/</namespace>
* </schema> * </schema>
* </metadata-schemas> * </metadata-schemas>
* } * }
*/ */
public class MetadataExporter public class MetadataExporter {
{
protected static MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance().getMetadataSchemaService(); protected static MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance()
protected static MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService(); .getMetadataSchemaService();
protected static MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance()
.getMetadataFieldService();
/**
* Default constructor
*/
private MetadataExporter() { }
/** /**
* @param args commandline arguments * @param args commandline arguments
* @throws ParseException if parser error * @throws ParseException if parser error
* @throws SAXException if XML parse error * @throws SAXException if XML parse error
* @throws IOException if IO error * @throws IOException if IO error
* @throws SQLException if database error * @throws SQLException if database error
* @throws RegistryExportException if export error * @throws RegistryExportException if export error
*/ */
public static void main(String[] args) throws ParseException, SQLException, IOException, SAXException, RegistryExportException public static void main(String[] args)
{ throws ParseException, SQLException, IOException, SAXException, RegistryExportException {
// create an options object and populate it // create an options object and populate it
CommandLineParser parser = new PosixParser(); CommandLineParser parser = new PosixParser();
Options options = new Options(); Options options = new Options();
options.addOption("f", "file", true, "output xml file for registry"); options.addOption("f", "file", true, "output xml file for registry");
options.addOption("s", "schema", true, "the name of the schema to export"); options.addOption("s", "schema", true, "the name of the schema to export");
CommandLine line = parser.parse(options, args); CommandLine line = parser.parse(options, args);
String file = null; String file = null;
String schema = null; String schema = null;
if (line.hasOption('f')) if (line.hasOption('f')) {
{ file = line.getOptionValue('f');
file = line.getOptionValue('f'); } else {
}
else
{
usage(); usage();
System.exit(0); System.exit(0);
} }
if (line.hasOption('s')) if (line.hasOption('s')) {
{
schema = line.getOptionValue('s'); schema = line.getOptionValue('s');
} }
@@ -90,15 +96,16 @@ public class MetadataExporter
/** /**
* Save a registry to a filepath * Save a registry to a filepath
* @param file filepath *
* @param file filepath
* @param schema schema definition to save * @param schema schema definition to save
* @throws SQLException if database error * @throws SQLException if database error
* @throws IOException if IO error * @throws IOException if IO error
* @throws SAXException if XML error * @throws SAXException if XML error
* @throws RegistryExportException if export error * @throws RegistryExportException if export error
*/ */
public static void saveRegistry(String file, String schema) throws SQLException, IOException, SAXException, RegistryExportException public static void saveRegistry(String file, String schema)
{ throws SQLException, IOException, SAXException, RegistryExportException {
// create a context // create a context
Context context = new Context(); Context context = new Context();
context.turnOffAuthorisationSystem(); context.turnOffAuthorisationSystem();
@@ -118,27 +125,22 @@ public class MetadataExporter
List<MetadataField> mdFields = null; List<MetadataField> mdFields = null;
// If a single schema has been specified // If a single schema has been specified
if (schema != null && !"".equals(schema)) if (schema != null && !"".equals(schema)) {
{
// Get the id of that schema // Get the id of that schema
MetadataSchema mdSchema = metadataSchemaService.find(context, schema); MetadataSchema mdSchema = metadataSchemaService.find(context, schema);
if (mdSchema == null) if (mdSchema == null) {
{
throw new RegistryExportException("no schema to export"); throw new RegistryExportException("no schema to export");
} }
// Get the metadata fields only for the specified schema // Get the metadata fields only for the specified schema
mdFields = metadataFieldService.findAllInSchema(context, mdSchema); mdFields = metadataFieldService.findAllInSchema(context, mdSchema);
} } else {
else
{
// Get the metadata fields for all the schemas // Get the metadata fields for all the schemas
mdFields = metadataFieldService.findAll(context); mdFields = metadataFieldService.findAll(context);
} }
// Output the metadata fields // Output the metadata fields
for (MetadataField mdField : mdFields) for (MetadataField mdField : mdFields) {
{
saveType(context, xmlSerializer, mdField); saveType(context, xmlSerializer, mdField);
} }
@@ -151,29 +153,26 @@ public class MetadataExporter
/** /**
* Serialize the schema registry. If the parameter 'schema' is null or empty, save all schemas * Serialize the schema registry. If the parameter 'schema' is null or empty, save all schemas
* @param context DSpace Context *
* @param context DSpace Context
* @param xmlSerializer XML serializer * @param xmlSerializer XML serializer
* @param schema schema (may be null to save all) * @param schema schema (may be null to save all)
* @throws SQLException if database error * @throws SQLException if database error
* @throws SAXException if XML error * @throws SAXException if XML error
* @throws RegistryExportException if export error * @throws RegistryExportException if export error
*/ */
public static void saveSchema(Context context, XMLSerializer xmlSerializer, String schema) throws SQLException, SAXException, RegistryExportException public static void saveSchema(Context context, XMLSerializer xmlSerializer, String schema)
{ throws SQLException, SAXException, RegistryExportException {
if (schema != null && !"".equals(schema)) if (schema != null && !"".equals(schema)) {
{
// Find a single named schema // Find a single named schema
MetadataSchema mdSchema = metadataSchemaService.find(context, schema); MetadataSchema mdSchema = metadataSchemaService.find(context, schema);
saveSchema(xmlSerializer, mdSchema); saveSchema(xmlSerializer, mdSchema);
} } else {
else
{
// Find all schemas // Find all schemas
List<MetadataSchema> mdSchemas = metadataSchemaService.findAll(context); List<MetadataSchema> mdSchemas = metadataSchemaService.findAll(context);
for (MetadataSchema mdSchema : mdSchemas) for (MetadataSchema mdSchema : mdSchemas) {
{
saveSchema(xmlSerializer, mdSchema); saveSchema(xmlSerializer, mdSchema);
} }
} }
@@ -183,29 +182,26 @@ public class MetadataExporter
* Serialize a single schema (namespace) registry entry * Serialize a single schema (namespace) registry entry
* *
* @param xmlSerializer XML serializer * @param xmlSerializer XML serializer
* @param mdSchema DSpace metadata schema * @param mdSchema DSpace metadata schema
* @throws SAXException if XML error * @throws SAXException if XML error
* @throws RegistryExportException if export error * @throws RegistryExportException if export error
*/ */
private static void saveSchema(XMLSerializer xmlSerializer, MetadataSchema mdSchema) throws SAXException, RegistryExportException private static void saveSchema(XMLSerializer xmlSerializer, MetadataSchema mdSchema)
{ throws SAXException, RegistryExportException {
// If we haven't got a schema, it's an error // If we haven't got a schema, it's an error
if (mdSchema == null) if (mdSchema == null) {
{
throw new RegistryExportException("no schema to export"); throw new RegistryExportException("no schema to export");
} }
String name = mdSchema.getName(); String name = mdSchema.getName();
String namespace = mdSchema.getNamespace(); String namespace = mdSchema.getNamespace();
if (name == null || "".equals(name)) if (name == null || "".equals(name)) {
{
System.out.println("name is null, skipping"); System.out.println("name is null, skipping");
return; return;
} }
if (namespace == null || "".equals(namespace)) if (namespace == null || "".equals(namespace)) {
{
System.out.println("namespace is null, skipping"); System.out.println("namespace is null, skipping");
return; return;
} }
@@ -229,19 +225,18 @@ public class MetadataExporter
/** /**
* Serialize a single metadata field registry entry to xml * Serialize a single metadata field registry entry to xml
* *
* @param context DSpace context * @param context DSpace context
* @param xmlSerializer xml serializer * @param xmlSerializer xml serializer
* @param mdField DSpace metadata field * @param mdField DSpace metadata field
* @throws SAXException if XML error * @throws SAXException if XML error
* @throws RegistryExportException if export error * @throws RegistryExportException if export error
* @throws SQLException if database error * @throws SQLException if database error
* @throws IOException if IO error * @throws IOException if IO error
*/ */
private static void saveType(Context context, XMLSerializer xmlSerializer, MetadataField mdField) throws SAXException, RegistryExportException, SQLException, IOException private static void saveType(Context context, XMLSerializer xmlSerializer, MetadataField mdField)
{ throws SAXException, RegistryExportException, SQLException, IOException {
// If we haven't been given a field, it's an error // If we haven't been given a field, it's an error
if (mdField == null) if (mdField == null) {
{
throw new RegistryExportException("no field to export"); throw new RegistryExportException("no field to export");
} }
@@ -252,8 +247,7 @@ public class MetadataExporter
String scopeNote = mdField.getScopeNote(); String scopeNote = mdField.getScopeNote();
// We must have a schema and element // We must have a schema and element
if (schemaName == null || element == null) if (schemaName == null || element == null) {
{
throw new RegistryExportException("incomplete field information"); throw new RegistryExportException("incomplete field information");
} }
@@ -271,26 +265,20 @@ public class MetadataExporter
xmlSerializer.endElement("element"); xmlSerializer.endElement("element");
// Output the qualifier, if present // Output the qualifier, if present
if (qualifier != null) if (qualifier != null) {
{
xmlSerializer.startElement("qualifier", null); xmlSerializer.startElement("qualifier", null);
xmlSerializer.characters(qualifier.toCharArray(), 0, qualifier.length()); xmlSerializer.characters(qualifier.toCharArray(), 0, qualifier.length());
xmlSerializer.endElement("qualifier"); xmlSerializer.endElement("qualifier");
} } else {
else
{
xmlSerializer.comment("unqualified"); xmlSerializer.comment("unqualified");
} }
// Output the scope note, if present // Output the scope note, if present
if (scopeNote != null) if (scopeNote != null) {
{
xmlSerializer.startElement("scope_note", null); xmlSerializer.startElement("scope_note", null);
xmlSerializer.characters(scopeNote.toCharArray(), 0, scopeNote.length()); xmlSerializer.characters(scopeNote.toCharArray(), 0, scopeNote.length());
xmlSerializer.endElement("scope_note"); xmlSerializer.endElement("scope_note");
} } else {
else
{
xmlSerializer.comment("no scope note"); xmlSerializer.comment("no scope note");
} }
@@ -298,31 +286,29 @@ public class MetadataExporter
    }

    static Map<Integer, String> schemaMap = new HashMap<Integer, String>();

    /**
     * Helper method to retrieve a schema name for the field.
     * Caches the name after looking up the id.
     *
     * @param context DSpace Context
     * @param mdField DSpace metadata field
     * @return name of schema
     * @throws SQLException if database error
     * @throws RegistryExportException if export error
     */
    private static String getSchemaName(Context context, MetadataField mdField)
        throws SQLException, RegistryExportException {
        // Get name from cache
        String name = schemaMap.get(mdField.getMetadataSchema().getID());

        if (name == null) {
            // Name not retrieved before, so get the schema now
            MetadataSchema mdSchema = metadataSchemaService.find(context, mdField.getMetadataSchema().getID());
            if (mdSchema != null) {
                name = mdSchema.getName();
                schemaMap.put(mdSchema.getID(), name);
            } else {
                // Can't find the schema
                throw new RegistryExportException("Can't get schema name for field");
            }
@@ -333,11 +319,10 @@ public class MetadataExporter
    /**
     * Print the usage message to stdout
     */
    public static void usage() {
        String usage = "Use this class with the following options:\n" +
            " -f <xml output file> : specify the output file for the schemas\n" +
            " -s <schema> : name of the schema to export\n";
        System.out.println(usage);
    }
}
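The qualifier and scope_note handling in saveType above repeats one SAX-style pattern: startElement, characters, endElement, or a comment when the value is absent. The following sketch only illustrates that pattern; the class and method names are hypothetical, and the Xerces import is assumed to match what MetadataExporter already uses.

// Illustrative sketch, not part of this changeset.
import java.io.IOException;

import org.apache.xml.serialize.XMLSerializer;
import org.xml.sax.SAXException;

class RegistryXmlWriteSketch {
    /** Emit one child element with character content, or an XML comment when the value is absent. */
    static void writeOptionalElement(XMLSerializer xmlSerializer, String name, String value)
        throws SAXException, IOException {
        if (value != null) {
            xmlSerializer.startElement(name, null);
            xmlSerializer.characters(value.toCharArray(), 0, value.length());
            xmlSerializer.endElement(name);
        } else {
            xmlSerializer.comment("no " + name);
        }
    }
}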

View File

@@ -9,7 +9,6 @@ package org.dspace.administer;
import java.io.IOException;
import java.sql.SQLException;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
@@ -18,12 +17,11 @@ import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataFieldService;
@@ -31,11 +29,9 @@ import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;

/**
@@ -49,45 +45,52 @@ import org.xml.sax.SAXException;
 *
 * {@code
 * <dspace-dc-types>
 *     <dc-type>
 *         <schema>icadmin</schema>
 *         <element>status</element>
 *         <qualifier>dateset</qualifier>
 *         <scope_note>the workflow status of an item</scope_note>
 *     </dc-type>
 *
 *     [....]
 *
 * </dspace-dc-types>
 * }
 */
public class MetadataImporter {
    protected static MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance()
            .getMetadataSchemaService();
    protected static MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance()
            .getMetadataFieldService();

    /**
     * logging category
     */
    private static final Logger log = LoggerFactory.getLogger(MetadataImporter.class);

    /**
     * Default constructor
     */
    private MetadataImporter() { }

    /**
     * main method for reading user input from the command line
     *
     * @param args the command line arguments given
     * @throws ParseException if parse error
     * @throws SQLException if database error
     * @throws IOException if IO error
     * @throws TransformerException if transformer error
     * @throws ParserConfigurationException if config error
     * @throws AuthorizeException if authorization error
     * @throws SAXException if parser error
     * @throws NonUniqueMetadataException if duplicate metadata
     * @throws RegistryImportException if import fails
     **/
    public static void main(String[] args)
        throws ParseException, SQLException, IOException, TransformerException,
        ParserConfigurationException, AuthorizeException, SAXException,
        NonUniqueMetadataException, RegistryImportException {
        boolean forceUpdate = false;

        // create an options object and populate it
@@ -98,12 +101,9 @@ public class MetadataImporter
        CommandLine line = parser.parse(options, args);

        String file = null;
        if (line.hasOption('f')) {
            file = line.getOptionValue('f');
        } else {
            usage();
            System.exit(0);
        }
@@ -115,25 +115,23 @@ public class MetadataImporter
    /**
     * Load the data from the specified file path into the database
     *
     * @param file the file path containing the source data
     * @param forceUpdate whether to force update
     * @throws SQLException if database error
     * @throws IOException if IO error
     * @throws TransformerException if transformer error
     * @throws ParserConfigurationException if config error
     * @throws AuthorizeException if authorization error
     * @throws SAXException if parser error
     * @throws NonUniqueMetadataException if duplicate metadata
     * @throws RegistryImportException if import fails
     */
    public static void loadRegistry(String file, boolean forceUpdate)
        throws SQLException, IOException, TransformerException, ParserConfigurationException,
        AuthorizeException, SAXException, NonUniqueMetadataException, RegistryImportException {
        Context context = null;

        try {
            // create a context
            context = new Context();
            context.turnOffAuthorisationSystem();
@@ -145,8 +143,7 @@ public class MetadataImporter
            NodeList schemaNodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-schema");

            // Add each one as a new format to the registry
            for (int i = 0; i < schemaNodes.getLength(); i++) {
                Node n = schemaNodes.item(i);
                loadSchema(context, n, forceUpdate);
            }
@@ -155,20 +152,18 @@ public class MetadataImporter
            NodeList typeNodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-type");

            // Add each one as a new format to the registry
            for (int i = 0; i < typeNodes.getLength(); i++) {
                Node n = typeNodes.item(i);
                loadType(context, n);
            }

            context.restoreAuthSystemState();
            context.complete();
        } finally {
            // Clean up our context, if it still exists & it was never completed
            if (context != null && context.isValid()) {
                context.abort();
            }
        }
    }
@@ -176,64 +171,54 @@ public class MetadataImporter
     * Process a node in the metadata registry XML file. If the
     * schema already exists, it will not be recreated
     *
     * @param context DSpace context object
     * @param node the node in the DOM tree
     * @throws SQLException if database error
     * @throws IOException if IO error
     * @throws TransformerException if transformer error
     * @throws AuthorizeException if authorization error
     * @throws NonUniqueMetadataException if duplicate metadata
     * @throws RegistryImportException if import fails
     */
    private static void loadSchema(Context context, Node node, boolean updateExisting)
        throws SQLException, IOException, TransformerException,
        AuthorizeException, NonUniqueMetadataException, RegistryImportException {
        // Get the values
        String name = RegistryImporter.getElementData(node, "name");
        String namespace = RegistryImporter.getElementData(node, "namespace");

        if (name == null || "".equals(name)) {
            throw new RegistryImportException("Name of schema must be supplied");
        }

        if (namespace == null || "".equals(namespace)) {
            throw new RegistryImportException("Namespace of schema must be supplied");
        }

        // check to see if the schema already exists
        MetadataSchema s = metadataSchemaService.find(context, name);

        if (s == null) {
            // Schema does not exist - create
            log.info("Registering Schema " + name + " (" + namespace + ")");
            metadataSchemaService.create(context, name, namespace);
        } else {
            // Schema exists - if it's the same namespace, allow the type imports to continue
            if (s.getNamespace().equals(namespace)) {
                // This schema already exists with this namespace, skipping it
                return;
            }

            // It's a different namespace - have we been told to update?
            if (updateExisting) {
                // Update the existing schema namespace and continue to type import
                log.info("Updating Schema " + name + ": New namespace " + namespace);
                s.setNamespace(namespace);
                metadataSchemaService.update(context, s);
            } else {
                throw new RegistryImportException(
                    "Schema " + name + " already registered with different namespace " + namespace + ". Rerun with " +
                        "'update' option enabled if you wish to update this schema.");
            }
        }
@@ -244,21 +229,18 @@ public class MetadataImporter
* be a "dc-type" node. If the type already exists, then it * be a "dc-type" node. If the type already exists, then it
* will not be reimported * will not be reimported
* *
* @param context * @param context DSpace context object
* DSpace context object * @param node the node in the DOM tree
* @param node * @throws SQLException if database error
* the node in the DOM tree * @throws IOException if IO error
* @throws SQLException if database error * @throws TransformerException if transformer error
* @throws IOException if IO error * @throws AuthorizeException if authorization error
* @throws TransformerException if transformer error
* @throws AuthorizeException if authorization error
* @throws NonUniqueMetadataException if duplicate metadata * @throws NonUniqueMetadataException if duplicate metadata
* @throws RegistryImportException if import fails * @throws RegistryImportException if import fails
*/ */
private static void loadType(Context context, Node node) private static void loadType(Context context, Node node)
throws SQLException, IOException, TransformerException, throws SQLException, IOException, TransformerException,
AuthorizeException, NonUniqueMetadataException, RegistryImportException AuthorizeException, NonUniqueMetadataException, RegistryImportException {
{
// Get the values // Get the values
String schema = RegistryImporter.getElementData(node, "schema"); String schema = RegistryImporter.getElementData(node, "schema");
String element = RegistryImporter.getElementData(node, "element"); String element = RegistryImporter.getElementData(node, "element");
@@ -266,31 +248,29 @@ public class MetadataImporter
        String scopeNote = RegistryImporter.getElementData(node, "scope_note");

        // If the schema is not provided default to DC
        if (schema == null) {
            schema = MetadataSchemaEnum.DC.getName();
        }

        // Find the matching schema object
        MetadataSchema schemaObj = metadataSchemaService.find(context, schema);

        if (schemaObj == null) {
            throw new RegistryImportException("Schema '" + schema + "' is not registered and does not exist.");
        }

        MetadataField mf = metadataFieldService.findByElement(context, schemaObj, element, qualifier);
        if (mf != null) {
            // Metadata field already exists, skipping it
            return;
        }

        // Actually create this metadata field as it doesn't yet exist
        String fieldName = schema + "." + element + "." + qualifier;
        if (qualifier == null) {
            fieldName = schema + "." + element;
        }
        log.info("Registering metadata field " + fieldName);
        MetadataField field = metadataFieldService.create(context, schemaObj, element, qualifier, scopeNote);
        metadataFieldService.update(context, field);
@@ -299,11 +279,10 @@ public class MetadataImporter
    /**
     * Print the usage message to stdout
     */
    public static void usage() {
        String usage = "Use this class with the following option:\n" +
            " -f <xml source file> : specify which xml source file " +
            "contains the DC fields to import.\n";
        System.out.println(usage);
    }
}
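MetadataImporter can also be driven programmatically through the public loadRegistry method shown above, which opens and cleans up its own Context. A minimal sketch follows; the registry path is a placeholder and the wrapper class is hypothetical.

// Illustrative sketch, not part of this changeset.
public class MetadataRegistryLoadSketch {
    public static void main(String[] args) throws Exception {
        // Load (or force-update, via the second argument) field definitions from an XML registry file.
        MetadataImporter.loadRegistry("path/to/registry-file.xml", true);
    }
}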

View File

@@ -12,13 +12,11 @@ package org.dspace.administer;
 *
 * An exception to report any problems with registry exports
 */
public class RegistryExportException extends Exception {
    /**
     * Create an empty authorize exception
     */
    public RegistryExportException() {
        super();
    }
@@ -27,8 +25,7 @@ public class RegistryExportException extends Exception
     *
     * @param message exception message
     */
    public RegistryExportException(String message) {
        super(message);
    }
@@ -36,10 +33,9 @@ public class RegistryExportException extends Exception
     * create an exception with an inner exception and a message
     *
     * @param message exception message
     * @param e reference to Throwable
     */
    public RegistryExportException(String message, Throwable e) {
        super(message, e);
    }
@@ -48,8 +44,7 @@ public class RegistryExportException extends Exception
     *
     * @param e reference to Throwable
     */
    public RegistryExportException(Throwable e) {
        super(e);
    }

View File

@@ -12,13 +12,11 @@ package org.dspace.administer;
 *
 * An exception to report any problems with registry imports
 */
public class RegistryImportException extends Exception {
    /**
     * Create an empty authorize exception
     */
    public RegistryImportException() {
        super();
    }
@@ -27,30 +25,27 @@ public class RegistryImportException extends Exception
     *
     * @param message error message
     */
    public RegistryImportException(String message) {
        super(message);
    }

    /**
     * create an exception with an inner exception and a message
     *
     * @param message error message
     * @param e throwable
     */
    public RegistryImportException(String message, Throwable e) {
        super(message, e);
    }

    /**
     * create an exception with an inner exception
     *
     * @param e throwable
     */
    public RegistryImportException(Throwable e) {
        super(e);
    }
}

View File

@@ -9,18 +9,15 @@ package org.dspace.administer;
import java.io.File;
import java.io.IOException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;

import org.apache.xpath.XPathAPI;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
@@ -31,24 +28,26 @@ import org.xml.sax.SAXException;
 * I am the author, really I ripped these methods off from other
 * classes
 */
public class RegistryImporter {
    /**
     * Default constructor
     */
    private RegistryImporter() { }

    /**
     * Load in the XML from file.
     *
     * @param filename the filename to load from
     * @return the DOM representation of the XML file
     * @throws IOException if IO error
     * @throws ParserConfigurationException if configuration parse error
     * @throws SAXException if XML parse error
     */
    public static Document loadXML(String filename)
        throws IOException, ParserConfigurationException, SAXException {
        DocumentBuilder builder = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder();

        Document document = builder.parse(new File(filename));
@@ -67,21 +66,17 @@ public class RegistryImporter
     * </P>
     * Why this isn't a core part of the XML API I do not know...
     *
     * @param parentElement the element, whose child element you want the CDATA from
     * @param childName the name of the element you want the CDATA from
     * @return the CDATA as a <code>String</code>
     * @throws TransformerException if error
     */
    public static String getElementData(Node parentElement, String childName)
        throws TransformerException {
        // Grab the child node
        Node childNode = XPathAPI.selectSingleNode(parentElement, childName);

        if (childNode == null) {
            // No child node, so no values
            return null;
        }
@@ -89,8 +84,7 @@ public class RegistryImporter
        // Get the #text
        Node dataNode = childNode.getFirstChild();

        if (dataNode == null) {
            return null;
        }
@@ -106,8 +100,8 @@ public class RegistryImporter
     * <P>
     * <code>
     * &lt;foo&gt;
     *     &lt;bar&gt;val1&lt;/bar&gt;
     *     &lt;bar&gt;val2&lt;/bar&gt;
     * &lt;/foo&gt;
     * </code>
     * passing this the <code>foo</code> node and <code>bar</code> will
@@ -115,23 +109,19 @@ public class RegistryImporter
     * </P>
     * Why this also isn't a core part of the XML API I do not know...
     *
     * @param parentElement the element, whose child element you want the CDATA from
     * @param childName the name of the element you want the CDATA from
     * @return the CDATA as a <code>String</code>
     * @throws TransformerException if error
     */
    public static String[] getRepeatedElementData(Node parentElement,
                                                  String childName) throws TransformerException {
        // Grab the child node
        NodeList childNodes = XPathAPI.selectNodeList(parentElement, childName);

        String[] data = new String[childNodes.getLength()];

        for (int i = 0; i < childNodes.getLength(); i++) {
            // Get the #text node
            Node dataNode = childNodes.item(i).getFirstChild();
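The three public helpers above (loadXML, getElementData, getRepeatedElementData) compose naturally. A minimal sketch follows; the file and element names are placeholders, and the wrapper class is hypothetical and assumed to import or sit alongside RegistryImporter.

// Illustrative sketch, not part of this changeset.
public class RegistryImporterSketch {
    public static void main(String[] args) throws Exception {
        org.w3c.dom.Document doc = RegistryImporter.loadXML("registry.xml");
        org.w3c.dom.Node root = doc.getDocumentElement();
        // Single-valued child element:
        String name = RegistryImporter.getElementData(root, "name");
        // Repeated child elements:
        String[] values = RegistryImporter.getRepeatedElementData(root, "bar");
        System.out.println(name + ": " + values.length + " repeated values");
    }
}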

View File

@@ -12,13 +12,12 @@ import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;

import org.apache.logging.log4j.Logger;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.BitstreamFormat;
@@ -44,12 +43,19 @@ import org.xml.sax.SAXException;
 * @author Robert Tansley
 * @version $Revision$
 */
public class RegistryLoader {
    /**
     * log4j category
     */
    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(RegistryLoader.class);

    protected static BitstreamFormatService bitstreamFormatService = ContentServiceFactory.getInstance()
            .getBitstreamFormatService();

    /**
     * Default constructor
     */
    private RegistryLoader() { }

    /**
     * For invoking via the command line
@@ -57,15 +63,13 @@ public class RegistryLoader
     * @param argv the command line arguments given
     * @throws Exception if error
     */
    public static void main(String[] argv) throws Exception {
        String usage = "Usage: " + RegistryLoader.class.getName()
            + " (-bitstream | -metadata) registry-file.xml";

        Context context = null;

        try {
            context = new Context();

            // Can't update registries anonymously, so we need to turn off
@@ -73,17 +77,12 @@ public class RegistryLoader
            context.turnOffAuthorisationSystem();

            // Work out what we're loading
            if (argv[0].equalsIgnoreCase("-bitstream")) {
                RegistryLoader.loadBitstreamFormats(context, argv[1]);
            } else if (argv[0].equalsIgnoreCase("-metadata")) {
                // Call MetadataImporter, as it handles Metadata schema updates
                MetadataImporter.loadRegistry(argv[1], true);
            } else {
                System.err.println(usage);
            }
@@ -91,81 +90,69 @@ public class RegistryLoader
            context.complete();

            System.exit(0);
        } catch (ArrayIndexOutOfBoundsException ae) {
            System.err.println(usage);
            System.exit(1);
        } catch (Exception e) {
            log.fatal(LogManager.getHeader(context, "error_loading_registries",
                    ""), e);

            System.err.println("Error: \n - " + e.getMessage());
            System.exit(1);
        } finally {
            // Clean up our context, if it still exists & it was never completed
            if (context != null && context.isValid()) {
                context.abort();
            }
        }
    }

    /**
     * Load Bitstream Format metadata
     *
     * @param context DSpace context object
     * @param filename the filename of the XML file to load
     * @throws SQLException if database error
     * @throws IOException if IO error
     * @throws TransformerException if transformer error
     * @throws ParserConfigurationException if config error
     * @throws AuthorizeException if authorization error
     * @throws SAXException if parser error
     */
    public static void loadBitstreamFormats(Context context, String filename)
        throws SQLException, IOException, ParserConfigurationException,
        SAXException, TransformerException, AuthorizeException {
        Document document = loadXML(filename);

        // Get the nodes corresponding to formats
        NodeList typeNodes = XPathAPI.selectNodeList(document,
                "dspace-bitstream-types/bitstream-type");

        // Add each one as a new format to the registry
        for (int i = 0; i < typeNodes.getLength(); i++) {
            Node n = typeNodes.item(i);
            loadFormat(context, n);
        }

        log.info(LogManager.getHeader(context, "load_bitstream_formats",
                "number_loaded=" + typeNodes.getLength()));
    }

    /**
     * Process a node in the bitstream format registry XML file. The node must
     * be a "bitstream-type" node
     *
     * @param context DSpace context object
     * @param node the node in the DOM tree
     * @throws SQLException if database error
     * @throws IOException if IO error
     * @throws TransformerException if transformer error
     * @throws AuthorizeException if authorization error
     */
    private static void loadFormat(Context context, Node node)
        throws SQLException, IOException, TransformerException,
        AuthorizeException {
        // Get the values
        String mimeType = getElementData(node, "mimetype");
        String shortDesc = getElementData(node, "short_description");
@@ -183,14 +170,12 @@ public class RegistryLoader
        BitstreamFormat exists = bitstreamFormatService.findByMIMEType(context, mimeType);

        // If not found by mimeType, check by short description (since this must also be unique)
        if (exists == null) {
            exists = bitstreamFormatService.findByShortDescription(context, shortDesc);
        }

        // If it doesn't exist, create it..otherwise skip it.
        if (exists == null) {
            // Create the format object
            BitstreamFormat format = bitstreamFormatService.create(context);
@@ -214,18 +199,16 @@ public class RegistryLoader
    /**
     * Load in the XML from file.
     *
     * @param filename the filename to load from
     * @return the DOM representation of the XML file
     * @throws IOException if IO error
     * @throws ParserConfigurationException if config error
     * @throws SAXException if parser error
     */
    private static Document loadXML(String filename) throws IOException,
        ParserConfigurationException, SAXException {
        DocumentBuilder builder = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder();

        return builder.parse(new File(filename));
    }
@@ -242,21 +225,17 @@ public class RegistryLoader
     * </P>
     * Why this isn't a core part of the XML API I do not know...
     *
     * @param parentElement the element, whose child element you want the CDATA from
     * @param childName the name of the element you want the CDATA from
     * @return the CDATA as a <code>String</code>
     * @throws TransformerException if transformer error
     */
    private static String getElementData(Node parentElement, String childName)
        throws TransformerException {
        // Grab the child node
        Node childNode = XPathAPI.selectSingleNode(parentElement, childName);

        if (childNode == null) {
            // No child node, so no values
            return null;
        }
@@ -264,8 +243,7 @@ public class RegistryLoader
        // Get the #text
        Node dataNode = childNode.getFirstChild();

        if (dataNode == null) {
            return null;
        }
@@ -281,8 +259,8 @@ public class RegistryLoader
     * <P>
     * <code>
     * &lt;foo&gt;
     *     &lt;bar&gt;val1&lt;/bar&gt;
     *     &lt;bar&gt;val2&lt;/bar&gt;
     * &lt;/foo&gt;
     * </code>
     * passing this the <code>foo</code> node and <code>bar</code> will
@@ -290,23 +268,19 @@ public class RegistryLoader
     * </P>
     * Why this also isn't a core part of the XML API I do not know...
     *
     * @param parentElement the element, whose child element you want the CDATA from
     * @param childName the name of the element you want the CDATA from
     * @return the CDATA as a <code>String</code>
     * @throws TransformerException if transformer error
     */
    private static String[] getRepeatedElementData(Node parentElement,
                                                   String childName) throws TransformerException {
        // Grab the child node
        NodeList childNodes = XPathAPI.selectNodeList(parentElement, childName);

        String[] data = new String[childNodes.getLength()];

        for (int i = 0; i < childNodes.getLength(); i++) {
            // Get the #text node
            Node dataNode = childNodes.item(i).getFirstChild();
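Because loadBitstreamFormats is public, the -bitstream path of main() above can be reproduced directly. A minimal sketch follows; the file name is a placeholder, the wrapper class is hypothetical, and it mirrors the authorisation bypass and context cleanup shown above.

// Illustrative sketch, not part of this changeset.
public class BitstreamFormatLoadSketch {
    public static void main(String[] args) throws Exception {
        org.dspace.core.Context context = new org.dspace.core.Context();
        try {
            context.turnOffAuthorisationSystem();
            RegistryLoader.loadBitstreamFormats(context, "bitstream-formats.xml");
            context.restoreAuthSystemState();
            context.complete();
        } finally {
            // Abort if the context was never completed (for example, after an exception).
            if (context.isValid()) {
                context.abort();
            }
        }
    }
}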

View File

@@ -7,14 +7,17 @@
 */
package org.dspace.administer;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
@@ -22,12 +25,18 @@ import javax.xml.transform.TransformerException;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
@@ -35,6 +44,7 @@ import org.dspace.core.Context;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.jdom.Element;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
@@ -46,106 +56,209 @@ import org.xml.sax.SAXException;
 * an XML file.
 *
 * The XML file structure needs to be:
 * <pre>{@code
 * <import_structure>
 *     <community>
 *         <name>....</name>
 *         <community>...</community>
 *         <collection>
 *             <name>....</name>
 *         </collection>
 *     </community>
 * </import_structure>
 * }</pre>
 * <p>
 * It can be arbitrarily deep, and supports all the metadata elements
 * that make up the community and collection metadata. See the system
 * documentation for more details.
 *
 * @author Richard Jones
 */
public class StructBuilder {
    /** Name of the root element for the document to be imported. */
    static final String INPUT_ROOT = "import_structure";

    /*
     * Name of the root element for the document produced by importing.
     * Community and collection elements are annotated with their identifiers.
     */
    static final String RESULT_ROOT = "imported_structure";

    /**
     * A table to hold metadata for the collection being worked on.
     */
    private static final Map<String, String> collectionMap = new HashMap<>();

    /**
     * A table to hold metadata for the community being worked on.
     */
    private static final Map<String, String> communityMap = new HashMap<>();

    protected static CommunityService communityService
            = ContentServiceFactory.getInstance().getCommunityService();
    protected static CollectionService collectionService
            = ContentServiceFactory.getInstance().getCollectionService();
    protected static EPersonService ePersonService
            = EPersonServiceFactory.getInstance().getEPersonService();

    /**
     * Default constructor
     */
    private StructBuilder() { }

    /**
     * Main method to be run from the command line to import a structure into
     * DSpace or export the existing structure to a file. The command is of the form:
     *
     * <p>{@code StructBuilder -f [XML source] -e [administrator email] -o [output file]}
     *
     * <p>to import, or
     *
     * <p>{@code StructBuilder -x -e [administrator email] -o [output file]}</p>
     *
     * <p>to export. The output will contain exactly the same as the source XML
     * document, but with the Handle for each imported item added as an attribute.
     *
     * @param argv command line arguments.
     * @throws ParserConfigurationException passed through.
     * @throws SQLException passed through.
     * @throws FileNotFoundException if input or output could not be opened.
     * @throws TransformerException if the input document is invalid.
     */
    public static void main(String[] argv)
            throws ParserConfigurationException, SQLException,
            FileNotFoundException, IOException, TransformerException {
        // Define command line options.
        Options options = new Options();

        options.addOption("h", "help", false, "Print this help message.");
        options.addOption("?", "help");
        options.addOption("x", "export", false, "Export the current structure as XML.");

        options.addOption(Option.builder("e").longOpt("eperson")
                .desc("User who is manipulating the repository's structure.")
                .hasArg().argName("eperson").required().build());

        options.addOption(Option.builder("f").longOpt("file")
                .desc("File of new structure information.")
                .hasArg().argName("input").build());

        options.addOption(Option.builder("o").longOpt("output")
                .desc("File to receive the structure map ('-' for standard out).")
                .hasArg().argName("output").required().build());

        // Parse the command line.
        CommandLineParser parser = new DefaultParser();
        CommandLine line = null;
        try {
            line = parser.parse(options, argv);
        } catch (ParseException ex) {
            System.err.println(ex.getMessage());
            usage(options);
            System.exit(1);
        }

        // If the user asked for help, give it and exit.
        if (line.hasOption('h') || line.hasOption('?')) {
            giveHelp(options);
            System.exit(0);
        }

        // Otherwise, analyze the command.
        // Must be import or export.
        if (!(line.hasOption('f') || line.hasOption('x'))) {
            giveHelp(options);
            System.exit(1);
        }

        // Open the output stream.
        String output = line.getOptionValue('o');
        OutputStream outputStream;
        if ("-".equals(output)) {
            outputStream = System.out;
        } else {
            outputStream = new FileOutputStream(output);
        }

        // create a context
        Context context = new Context();

        // set the context.
        String eperson = line.getOptionValue('e');
        try {
            context.setCurrentUser(ePersonService.findByEmail(context, eperson));
        } catch (SQLException ex) {
            System.err.format("That user could not be found: %s%n", ex.getMessage());
            System.exit(1);
        }

        // Export? Import?
        if (line.hasOption('x')) { // export
            exportStructure(context, outputStream);
        } else { // Must be import
            String input = line.getOptionValue('f');
            if (null == input) {
                usage(options);
                System.exit(1);
            }

            InputStream inputStream;
            if ("-".equals(input)) {
                inputStream = System.in;
            } else {
                inputStream = new FileInputStream(input);
            }

            importStructure(context, inputStream, outputStream);
            // save changes from import
            context.complete();
        }
        System.exit(0);
    }
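The two command forms described in the javadoc above translate into argument vectors like the following. The paths and e-mail address are placeholders, the wrapper class is hypothetical, and each vector is meant for a separate run, since main() exits the JVM.

// Illustrative sketch, not part of this changeset.
public class StructBuilderCliSketch {
    public static void main(String[] args) throws Exception {
        // Import a structure file and write the handle map:
        StructBuilder.main(new String[] {"-f", "structure.xml", "-e", "admin@example.com", "-o", "mapfile.xml"});
        // Export the current structure instead ("-" would write to standard out); run separately:
        // StructBuilder.main(new String[] {"-x", "-e", "admin@example.com", "-o", "current-structure.xml"});
    }
}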
    /**
     * Import new Community/Collection structure.
     *
     * @param context
     * @param input XML which describes the new communities and collections.
     * @param output input, annotated with the new objects' identifiers.
     * @throws IOException
     * @throws ParserConfigurationException
     * @throws SAXException
     * @throws TransformerException
     * @throws SQLException
     */
    static void importStructure(Context context, InputStream input, OutputStream output)
            throws IOException, ParserConfigurationException, SQLException, TransformerException {
        // load the XML
        Document document = null;
        try {
            document = loadXML(input);
        } catch (IOException ex) {
            System.err.format("The input document could not be read: %s%n", ex.getMessage());
            System.exit(1);
        } catch (SAXException ex) {
            System.err.format("The input document could not be parsed: %s%n", ex.getMessage());
            System.exit(1);
        }

        // run the preliminary validation, to be sure that the XML document
        // is properly structured.
        try {
            validate(document);
        } catch (TransformerException ex) {
            System.err.format("The input document is invalid: %s%n", ex.getMessage());
            System.exit(1);
        }

        // Check for 'identifier' attributes -- possibly output by this class.
        NodeList identifierNodes = XPathAPI.selectNodeList(document, "//*[@identifier]");
        if (identifierNodes.getLength() > 0) {
            System.err.println("The input document has 'identifier' attributes, which will be ignored.");
        }

        // load the mappings into the member variable hashmaps
        communityMap.put("name", "name");
@@ -162,119 +275,235 @@ public class StructBuilder
collectionMap.put("license", "license"); collectionMap.put("license", "license");
collectionMap.put("provenance", "provenance_description"); collectionMap.put("provenance", "provenance_description");
// get the top level community list Element[] elements = new Element[]{};
NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community"); try {
// get the top level community list
NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
// run the import starting with the top level communities // run the import starting with the top level communities
Element[] elements = handleCommunities(context, first, null); elements = handleCommunities(context, first, null);
} catch (TransformerException ex) {
System.err.format("Input content not understood: %s%n", ex.getMessage());
System.exit(1);
} catch (AuthorizeException ex) {
System.err.format("Not authorized: %s%n", ex.getMessage());
System.exit(1);
}
// generate the output // generate the output
Element root = xmlOutput.getRootElement(); final Element root = new Element(RESULT_ROOT);
for (int i = 0; i < elements.length; i++)
{ for (Element element : elements) {
root.addContent(elements[i]); root.addContent(element);
} }
// finally write the string into the output file // finally write the string into the output file.
try final org.jdom.Document xmlOutput = new org.jdom.Document(root);
{ try {
BufferedWriter out = new BufferedWriter(new FileWriter(output)); new XMLOutputter().output(xmlOutput, output);
out.write(new XMLOutputter().outputString(xmlOutput)); } catch (IOException e) {
out.close(); System.out.printf("Unable to write to output file %s: %s%n",
output, e.getMessage());
System.exit(1);
} }
catch (IOException e)
{
System.out.println("Unable to write to output file " + output);
System.exit(0);
}
context.complete();
} }
/** /**
* Output the usage information * Add a single community, and its children, to the Document.
*
* @param community
* @return a fragment representing this Community.
*/ */
private static void usage() private static Element exportACommunity(Community community) {
{ // Export this Community.
System.out.println("Usage: java StructBuilder -f <source XML file> -o <output file> -e <eperson email>"); Element element = new Element("community");
System.out.println("Communities will be created from the top level, and a map of communities to handles will be returned in the output file"); element.setAttribute("identifier", community.getHandle());
return; element.addContent(new Element("name").setText(community.getName()));
element.addContent(new Element("description")
.setText(communityService.getMetadataFirstValue(community,
MetadataSchemaEnum.DC.getName(), "description", "abstract", Item.ANY)));
element.addContent(new Element("intro")
.setText(communityService.getMetadataFirstValue(community,
MetadataSchemaEnum.DC.getName(), "description", null, Item.ANY)));
element.addContent(new Element("copyright")
.setText(communityService.getMetadataFirstValue(community,
MetadataSchemaEnum.DC.getName(), "rights", null, Item.ANY)));
element.addContent(new Element("sidebar")
.setText(communityService.getMetadataFirstValue(community,
MetadataSchemaEnum.DC.getName(), "description", "tableofcontents", Item.ANY)));
// Export this Community's Community children.
for (Community subCommunity : community.getSubcommunities()) {
element.addContent(exportACommunity(subCommunity));
}
// Export this Community's Collection children.
for (Collection collection : community.getCollections()) {
element.addContent(exportACollection(collection));
}
return element;
} }
/**
 * Add a single Collection to the Document.
 *
 * @param collection
 * @return a fragment representing this Collection.
 */
private static Element exportACollection(Collection collection) {
// Export this Collection.
Element element = new Element("collection");
element.setAttribute("identifier", collection.getHandle());
element.addContent(new Element("name").setText(collection.getName()));
element.addContent(new Element("description")
.setText(collectionService.getMetadataFirstValue(collection,
MetadataSchemaEnum.DC.getName(), "description", "abstract", Item.ANY)));
element.addContent(new Element("intro")
.setText(collectionService.getMetadataFirstValue(collection,
MetadataSchemaEnum.DC.getName(), "description", null, Item.ANY)));
element.addContent(new Element("copyright")
.setText(collectionService.getMetadataFirstValue(collection,
MetadataSchemaEnum.DC.getName(), "rights", null, Item.ANY)));
element.addContent(new Element("sidebar")
.setText(collectionService.getMetadataFirstValue(collection,
MetadataSchemaEnum.DC.getName(), "description", "tableofcontents", Item.ANY)));
element.addContent(new Element("license")
.setText(collectionService.getMetadataFirstValue(collection,
MetadataSchemaEnum.DC.getName(), "rights", "license", Item.ANY)));
// Provenance is special: multivalued
for (MetadataValue value : collectionService.getMetadata(collection,
MetadataSchemaEnum.DC.getName(), "provenance", null, Item.ANY)) {
element.addContent(new Element("provenance")
.setText(value.getValue()));
}
return element;
}
/**
* Write out the existing Community/Collection structure.
*/
static void exportStructure(Context context, OutputStream output) {
// Build a document from the Community/Collection hierarchy.
Element rootElement = new Element(INPUT_ROOT); // To be read by importStructure, perhaps
List<Community> communities = null;
try {
communities = communityService.findAllTop(context);
} catch (SQLException ex) {
System.out.printf("Unable to get the list of top-level communities: %s%n",
ex.getMessage());
System.exit(1);
}
for (Community community : communities) {
rootElement.addContent(exportACommunity(community));
}
// Now write the structure out.
org.jdom.Document xmlOutput = new org.jdom.Document(rootElement);
try {
XMLOutputter outputter = new XMLOutputter(Format.getPrettyFormat());
outputter.output(xmlOutput, output);
} catch (IOException e) {
System.out.printf("Unable to write to output file %s: %s%n",
output, e.getMessage());
System.exit(1);
}
}
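For orientation, a minimal sketch of driving the new export path is below. It assumes the caller sits in the same package as StructBuilder (the method is package-private; org.dspace.administer in current DSpace) and that writing to a local struct-export.xml file is acceptable; the class and file names are illustrative only, not part of this change.

package org.dspace.administer;

import java.io.FileOutputStream;
import java.io.OutputStream;

import org.dspace.core.Context;

// Illustrative sketch only: dump the current Community/Collection tree to a
// local file via the new exportStructure(Context, OutputStream) method.
public class StructureExportSketch {
    public static void main(String[] args) throws Exception {
        Context context = new Context();                        // anonymous, read-only use
        try (OutputStream out = new FileOutputStream("struct-export.xml")) {  // example filename
            StructBuilder.exportStructure(context, out);
        } finally {
            context.abort();                                    // nothing to commit
        }
    }
}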
/**
* Output the usage information.
*/
private static void usage(Options options) {
HelpFormatter helper = new HelpFormatter();
try (PrintWriter writer = new PrintWriter(System.out);) {
helper.printUsage(writer, 80/* FIXME Magic */,
"structure-builder", options);
}
}
/**
* Help the user more.
*/
private static void giveHelp(Options options) {
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp("struct-builder",
"Import or export Community/Collection structure.",
options,
"When importing (-f), communities will be created from the "
+ "top level, and a map of communities to handles will "
+ "be returned in the output file. When exporting (-x),"
+ "the current structure will be written to the map file.",
true);
}
/**
 * Validate the XML document. This method returns if the document is valid.
 * If validation fails it generates an error and ceases execution.
 *
 * @param document the XML document object
 * @throws TransformerException if transformer error
 */
private static void validate(org.w3c.dom.Document document)
    throws TransformerException {
    StringBuilder err = new StringBuilder();
    boolean trip = false;

    err.append("The following errors were encountered parsing the source XML.\n");
    err.append("No changes have been made to the DSpace instance.\n\n");

    NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
    if (first.getLength() == 0) {
        err.append("-There are no top level communities in the source document.");
        System.out.println(err.toString());
        System.exit(1);
    }

    String errs = validateCommunities(first, 1);
    if (errs != null) {
        err.append(errs);
        trip = true;
    }

    if (trip) {
        System.out.println(err.toString());
        System.exit(1);
    }
}
/**
 * Validate the communities section of the XML document. This returns a string
 * containing any errors encountered, or null if there were no errors.
 *
 * @param communities the NodeList of communities to validate
 * @param level       the level in the XML document that we are at, for the purposes
 *                    of error reporting
 * @return the errors that need to be generated by the calling method, or null if
 *         no errors.
 */
private static String validateCommunities(NodeList communities, int level)
    throws TransformerException {
    StringBuilder err = new StringBuilder();
    boolean trip = false;
    String errs = null;

    for (int i = 0; i < communities.getLength(); i++) {
        Node n = communities.item(i);
        NodeList name = XPathAPI.selectNodeList(n, "name");
        if (name.getLength() != 1) {
            String pos = Integer.toString(i + 1);
            err.append("-The level ").append(level)
               .append(" community in position ").append(pos)
               .append(" does not contain exactly one name field.\n");
            trip = true;
        }

        // validate sub communities
        NodeList subCommunities = XPathAPI.selectNodeList(n, "community");
        String comErrs = validateCommunities(subCommunities, level + 1);
        if (comErrs != null) {
            err.append(comErrs);
            trip = true;
        }
@@ -282,15 +511,13 @@ public class StructBuilder
        // validate collections
        NodeList collections = XPathAPI.selectNodeList(n, "collection");
        String colErrs = validateCollections(collections, level + 1);
        if (colErrs != null) {
            err.append(colErrs);
            trip = true;
        }
    }

    if (trip) {
        errs = err.toString();
    }
@@ -299,35 +526,31 @@ public class StructBuilder
/**
 * Validate the collection section of the XML document. This generates a
 * string containing any errors encountered, or returns null if no errors.
 *
 * @param collections a NodeList of collections to validate
 * @param level       the level in the XML document for the purposes of error reporting
 * @return the errors to be generated by the calling method, or null if none
 */
private static String validateCollections(NodeList collections, int level)
    throws TransformerException {
    StringBuilder err = new StringBuilder();
    boolean trip = false;
    String errs = null;

    for (int i = 0; i < collections.getLength(); i++) {
        Node n = collections.item(i);
        NodeList name = XPathAPI.selectNodeList(n, "name");
        if (name.getLength() != 1) {
            String pos = Integer.toString(i + 1);
            err.append("-The level ").append(level)
               .append(" collection in position ").append(pos)
               .append(" does not contain exactly one name field.\n");
            trip = true;
        }
    }

    if (trip) {
        errs = err.toString();
    }
@@ -335,20 +558,17 @@ public class StructBuilder
}

/**
 * Load the XML document from input.
 *
 * @param input the input stream to load from.
 * @return the DOM representation of the XML input.
 */
private static org.w3c.dom.Document loadXML(InputStream input)
    throws IOException, ParserConfigurationException, SAXException {
    DocumentBuilder builder = DocumentBuilderFactory.newInstance()
                                                    .newDocumentBuilder();

    org.w3c.dom.Document document = builder.parse(input);

    return document;
}
@@ -357,19 +577,15 @@ public class StructBuilder
 * Return the String value of a Node
 *
 * @param node the node from which we want to extract the string value
 * @return the string value of the node
 */
private static String getStringValue(Node node) {
    String value = node.getNodeValue();

    if (node.hasChildNodes()) {
        Node first = node.getFirstChild();

        if (first.getNodeType() == Node.TEXT_NODE) {
            return first.getNodeValue().trim();
        }
    }
@@ -381,30 +597,24 @@ public class StructBuilder
 * Take a node list of communities and build the structure from them, delegating
 * to the relevant methods in this class for sub-communities and collections
 *
 * @param context     the context of the request
 * @param communities a nodelist of communities to create along with their sub-structures
 * @param parent      the parent community of the nodelist of communities to create
 * @return an element array containing additional information regarding the
 *         created communities (e.g. the handles they have been assigned)
 */
private static Element[] handleCommunities(Context context, NodeList communities, Community parent)
    throws TransformerException, SQLException, AuthorizeException {
    Element[] elements = new Element[communities.getLength()];

    for (int i = 0; i < communities.getLength(); i++) {
        Community community;
        Element element = new Element("community");

        // create the community or sub community
        if (parent != null) {
            community = communityService.create(parent, context);
        } else {
            community = communityService.create(null, context);
        }
@@ -413,22 +623,18 @@ public class StructBuilder
        // now update the metadata
        Node tn = communities.item(i);
        for (Map.Entry<String, String> entry : communityMap.entrySet()) {
            NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
            if (nl.getLength() == 1) {
                communityService.setMetadata(context, community, entry.getValue(), getStringValue(nl.item(0)));
            }
        }

        // FIXME: at the moment, if the community already exists by name
        // then this will throw an SQLException on a duplicate key
        // violation.
        // Ideally we'd skip this row and continue to create sub communities
        // and so forth where they don't exist, but it's proving difficult
        // to isolate the community that already exists without hitting
        // the database directly.
        communityService.update(context, community);
@@ -447,29 +653,25 @@ public class StructBuilder
        nameElement.setText(communityService.getMetadata(community, "name"));
        element.addContent(nameElement);

        if (communityService.getMetadata(community, "short_description") != null) {
            Element descriptionElement = new Element("description");
            descriptionElement.setText(communityService.getMetadata(community, "short_description"));
            element.addContent(descriptionElement);
        }

        if (communityService.getMetadata(community, "introductory_text") != null) {
            Element introElement = new Element("intro");
            introElement.setText(communityService.getMetadata(community, "introductory_text"));
            element.addContent(introElement);
        }

        if (communityService.getMetadata(community, "copyright_text") != null) {
            Element copyrightElement = new Element("copyright");
            copyrightElement.setText(communityService.getMetadata(community, "copyright_text"));
            element.addContent(copyrightElement);
        }

        if (communityService.getMetadata(community, "side_bar_text") != null) {
            Element sidebarElement = new Element("sidebar");
            sidebarElement.setText(communityService.getMetadata(community, "side_bar_text"));
            element.addContent(sidebarElement);
@@ -484,12 +686,10 @@ public class StructBuilder
        Element[] collectionElements = handleCollections(context, collections, community);

        int j;
        for (j = 0; j < subCommunityElements.length; j++) {
            element.addContent(subCommunityElements[j]);
        }
        for (j = 0; j < collectionElements.length; j++) {
            element.addContent(collectionElements[j]);
        }
@@ -500,22 +700,19 @@ public class StructBuilder
}

/**
 * Take a node list of collections and create the structure from them
 *
 * @param context     the context of the request
 * @param collections the node list of collections to be created
 * @param parent      the parent community to whom the collections belong
 * @return an Element array containing additional information about the
 *         created collections (e.g. the handle)
 */
private static Element[] handleCollections(Context context, NodeList collections, Community parent)
    throws TransformerException, SQLException, AuthorizeException {
    Element[] elements = new Element[collections.getLength()];

    for (int i = 0; i < collections.getLength(); i++) {
        Element element = new Element("collection");
        Collection collection = collectionService.create(context, parent);
@@ -524,11 +721,9 @@ public class StructBuilder
        // import the rest of the metadata
        Node tn = collections.item(i);
        for (Map.Entry<String, String> entry : collectionMap.entrySet()) {
            NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
            if (nl.getLength() == 1) {
                collectionService.setMetadata(context, collection, entry.getValue(), getStringValue(nl.item(0)));
            }
        }
@@ -541,43 +736,37 @@ public class StructBuilder
        nameElement.setText(collectionService.getMetadata(collection, "name"));
        element.addContent(nameElement);

        if (collectionService.getMetadata(collection, "short_description") != null) {
            Element descriptionElement = new Element("description");
            descriptionElement.setText(collectionService.getMetadata(collection, "short_description"));
            element.addContent(descriptionElement);
        }

        if (collectionService.getMetadata(collection, "introductory_text") != null) {
            Element introElement = new Element("intro");
            introElement.setText(collectionService.getMetadata(collection, "introductory_text"));
            element.addContent(introElement);
        }

        if (collectionService.getMetadata(collection, "copyright_text") != null) {
            Element copyrightElement = new Element("copyright");
            copyrightElement.setText(collectionService.getMetadata(collection, "copyright_text"));
            element.addContent(copyrightElement);
        }

        if (collectionService.getMetadata(collection, "side_bar_text") != null) {
            Element sidebarElement = new Element("sidebar");
            sidebarElement.setText(collectionService.getMetadata(collection, "side_bar_text"));
            element.addContent(sidebarElement);
        }

        if (collectionService.getMetadata(collection, "license") != null) {
            Element sidebarElement = new Element("license");
            sidebarElement.setText(collectionService.getMetadata(collection, "license"));
            element.addContent(sidebarElement);
        }

        if (collectionService.getMetadata(collection, "provenance_description") != null) {
            Element sidebarElement = new Element("provenance");
            sidebarElement.setText(collectionService.getMetadata(collection, "provenance_description"));
            element.addContent(sidebarElement);

@@ -7,67 +7,94 @@
 */
package org.dspace.app.bulkedit;

import java.util.ArrayList;
import java.util.List;

import org.dspace.content.Collection;
import org.dspace.content.Item;

/**
 * Utility class to store changes to item that may occur during a batch edit.
 *
 * @author Stuart Lewis
 */
public class BulkEditChange {
    /**
     * The item these changes relate to
     */
    private Item item;

    /**
     * The List of hashtables with the new elements
     */
    private List<BulkEditMetadataValue> adds;

    /**
     * The List of hashtables with the removed elements
     */
    private List<BulkEditMetadataValue> removes;

    /**
     * The List of hashtables with the unchanged elements
     */
    private List<BulkEditMetadataValue> constant;

    /**
     * The List of the complete set of new values (constant + adds)
     */
    private List<BulkEditMetadataValue> complete;

    /**
     * The list of old collections the item used to be mapped to
     */
    private List<Collection> oldMappedCollections;

    /**
     * The list of new collections the item has been mapped into
     */
    private List<Collection> newMappedCollections;

    /**
     * The old owning collection
     */
    private Collection oldOwningCollection;

    /**
     * The new owning collection
     */
    private Collection newOwningCollection;

    /**
     * Is this a new item
     */
    private boolean newItem;

    /**
     * Has this item been deleted?
     */
    private boolean deleted;

    /**
     * Has this item been withdrawn?
     */
    private boolean withdrawn;

    /**
     * Has this item been reinstated?
     */
    private boolean reinstated;

    /**
     * Have any changes actually been made?
     */
    private boolean empty;


    /**
     * Initialise a change holder for a new item
     */
    public BulkEditChange() {
        // Set the item to be null
        item = null;
        newItem = true;
@@ -89,8 +116,7 @@ public class BulkEditChange
     *
     * @param i The Item to store
     */
    public BulkEditChange(Item i) {
        // Store the item
        item = i;
        newItem = false;
@@ -110,8 +136,7 @@ public class BulkEditChange
     *
     * @param i The item
     */
    public void setItem(Item i) {
        // Store the item
        item = i;
    }
@@ -121,8 +146,7 @@ public class BulkEditChange
     *
     * @param dcv The value to add
     */
    public void registerAdd(BulkEditMetadataValue dcv) {
        // Add the added value
        adds.add(dcv);
        complete.add(dcv);
@@ -134,8 +158,7 @@ public class BulkEditChange
     *
     * @param dcv The value to remove
     */
    public void registerRemove(BulkEditMetadataValue dcv) {
        // Add the removed value
        removes.add(dcv);
        empty = false;
@@ -146,8 +169,7 @@ public class BulkEditChange
     *
     * @param dcv The value to keep unchanged
     */
    public void registerConstant(BulkEditMetadataValue dcv) {
        // Add the removed value
        constant.add(dcv);
        complete.add(dcv);
@@ -158,8 +180,7 @@ public class BulkEditChange
     *
     * @param c The new mapped Collection
     */
    public void registerNewMappedCollection(Collection c) {
        // Add the new owning Collection
        newMappedCollections.add(c);
        empty = false;
@@ -170,27 +191,22 @@ public class BulkEditChange
     *
     * @param c The old mapped Collection
     */
    public void registerOldMappedCollection(Collection c) {
        // Add the old owning Collection (if it isn't there already, or is an old collection)
        boolean found = false;

        if ((this.getOldOwningCollection() != null) &&
            (this.getOldOwningCollection().getHandle().equals(c.getHandle()))) {
            found = true;
        }

        for (Collection collection : oldMappedCollections) {
            if (collection.getHandle().equals(c.getHandle())) {
                found = true;
            }
        }

        if (!found) {
            oldMappedCollections.add(c);
            empty = false;
        }
@@ -202,8 +218,7 @@ public class BulkEditChange
     * @param oldC The old owning collection
     * @param newC The new owning collection
     */
    public void changeOwningCollection(Collection oldC, Collection newC) {
        // Store the old owning collection
        oldOwningCollection = oldC;
@@ -217,8 +232,7 @@ public class BulkEditChange
     *
     * @param newC The new owning collection
     */
    public void setOwningCollection(Collection newC) {
        // Store the new owning collection
        newOwningCollection = newC;
        //empty = false;
@@ -229,8 +243,7 @@ public class BulkEditChange
     *
     * @return The item
     */
    public Item getItem() {
        // Return the item
        return item;
    }
@@ -240,8 +253,7 @@ public class BulkEditChange
     *
     * @return the list of elements and their values that have been added.
     */
    public List<BulkEditMetadataValue> getAdds() {
        // Return the array
        return adds;
    }
@@ -251,8 +263,7 @@ public class BulkEditChange
     *
     * @return the list of elements and their values that have been removed.
     */
    public List<BulkEditMetadataValue> getRemoves() {
        // Return the array
        return removes;
    }
@@ -262,8 +273,7 @@ public class BulkEditChange
     *
     * @return the list of unchanged values
     */
    public List<BulkEditMetadataValue> getConstant() {
        // Return the array
        return constant;
    }
@@ -273,8 +283,7 @@ public class BulkEditChange
     *
     * @return the list of all values
     */
    public List<BulkEditMetadataValue> getComplete() {
        // Return the array
        return complete;
    }
@@ -284,8 +293,7 @@ public class BulkEditChange
     *
     * @return the list of new mapped collections
     */
    public List<Collection> getNewMappedCollections() {
        // Return the array
        return newMappedCollections;
    }
@@ -295,8 +303,7 @@ public class BulkEditChange
     *
     * @return the list of old mapped collections
     */
    public List<Collection> getOldMappedCollections() {
        // Return the array
        return oldMappedCollections;
    }
@@ -306,8 +313,7 @@ public class BulkEditChange
     *
     * @return the old owning collection
     */
    public Collection getOldOwningCollection() {
        // Return the old owning collection
        return oldOwningCollection;
    }
@@ -317,8 +323,7 @@ public class BulkEditChange
     *
     * @return the new owning collection
     */
    public Collection getNewOwningCollection() {
        // Return the new owning collection
        return newOwningCollection;
    }
@@ -328,8 +333,7 @@ public class BulkEditChange
     *
     * @return Whether or not this is for a new item
     */
    public boolean isNewItem() {
        // Return the new item status
        return newItem;
    }
@@ -339,8 +343,7 @@ public class BulkEditChange
     *
     * @return Whether or not this is for a deleted item
     */
    public boolean isDeleted() {
        // Return the new item status
        return deleted;
    }
@@ -359,8 +362,7 @@ public class BulkEditChange
     *
     * @return Whether or not this is for a withdrawn item
     */
    public boolean isWithdrawn() {
        // Return the new item status
        return withdrawn;
    }
@@ -379,8 +381,7 @@ public class BulkEditChange
     *
     * @return Whether or not this is for a reinstated item
     */
    public boolean isReinstated() {
        // Return the new item status
        return reinstated;
    }
@@ -399,8 +400,7 @@ public class BulkEditChange
     *
     * @return Whether or not changes have been made
     */
    public boolean hasChanges() {
        return !empty;
    }
}
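As a usage illustration (not part of the change), here is a short sketch of recording one change against an existing Item with this holder class. The BulkEditMetadataValue setters used below are assumed from its usual bean shape; the class name is hypothetical.

package org.dspace.app.bulkedit;

import org.dspace.content.Item;

// Illustrative sketch only: record one added metadata value for an Item and
// confirm that the change-set is no longer empty.
public class BulkEditChangeSketch {
    static BulkEditChange describeTitleAddition(Item item) {
        BulkEditChange change = new BulkEditChange(item);

        BulkEditMetadataValue dcv = new BulkEditMetadataValue();  // assumed simple bean setters
        dcv.setSchema("dc");
        dcv.setElement("title");
        dcv.setValue("A corrected title");

        change.registerAdd(dcv);       // lands in both getAdds() and getComplete()
        assert change.hasChanges();    // registerAdd clears the "empty" flag
        return change;
    }
}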

@@ -7,6 +7,23 @@
 */
package org.dspace.app.bulkedit;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.factory.AuthorityServiceFactory;
@@ -15,20 +32,16 @@ import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
import org.dspace.content.authority.Choices;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
 * Utility class to read and write CSV files
 *
@@ -38,50 +51,74 @@ import java.io.*;
 *
 * This class has been made serializable, as it is stored in a Session.
 * Is it wise to:
 * a) be putting this into a user's session?
 * b) holding an entire CSV upload in memory?
 *
 * @author Stuart Lewis
 */
public class DSpaceCSV implements Serializable {
    /**
     * The headings of the CSV file
     */
    protected List<String> headings;

    /**
     * An array list of CSV lines
     */
    protected List<DSpaceCSVLine> lines;

    /**
     * A counter of how many CSV lines this object holds
     */
    protected int counter;

    /**
     * The value separator (defaults to double pipe '||')
     */
    protected String valueSeparator;

    /**
     * The value separator in an escaped form for using in regexes
     */
    protected String escapedValueSeparator;

    /**
     * The field separator (defaults to comma)
     */
    protected String fieldSeparator;

    /**
     * The field separator in an escaped form for using in regexes
     */
    protected String escapedFieldSeparator;

    /**
     * The authority separator (defaults to double colon '::')
     */
    protected String authoritySeparator;

    /**
     * The authority separator in an escaped form for using in regexes
     */
    protected String escapedAuthoritySeparator;

    protected transient final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
    protected transient final MetadataSchemaService metadataSchemaService =
        ContentServiceFactory.getInstance().getMetadataSchemaService();
    protected transient final MetadataFieldService metadataFieldService =
        ContentServiceFactory.getInstance().getMetadataFieldService();
    protected transient final AuthorityValueService authorityValueService =
        AuthorityServiceFactory.getInstance().getAuthorityValueService();

    /**
     * Whether to export all metadata such as handles and provenance information
     */
    protected boolean exportAll;

    /**
     * A list of metadata elements to ignore
     */
    protected Map<String, String> ignore;
@@ -90,8 +127,7 @@ public class DSpaceCSV implements Serializable
     *
     * @param exportAll Whether to export all metadata such as handles and provenance information
     */
    public DSpaceCSV(boolean exportAll) {
        // Initialise the class
        init();
@@ -102,50 +138,42 @@ public class DSpaceCSV implements Serializable
    /**
     * Create a new instance, reading the lines in from file
     *
     * @param inputStream the inputstream to read from
     * @param c           The DSpace Context
     * @throws Exception thrown if there is an error reading or processing the file
     */
    public DSpaceCSV(InputStream inputStream, Context c) throws Exception {
        // Initialise the class
        init();

        // Open the CSV file
        BufferedReader input = null;
        try {
            input = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));

            // Read the heading line
            String head = input.readLine();
            String[] headingElements = head.split(escapedFieldSeparator);
            int columnCounter = 0;
            for (String element : headingElements) {
                columnCounter++;

                // Remove surrounding quotes if there are any
                if ((element.startsWith("\"")) && (element.endsWith("\""))) {
                    element = element.substring(1, element.length() - 1);
                }

                // Store the heading
                if ("collection".equals(element)) {
                    // Store the heading
                    headings.add(element);
                } else if ("rowName".equals(element)) {
                    // Store the heading
                    headings.add(element);
                } else if ("action".equals(element)) { // Store the action
                    // Store the heading
                    headings.add(element);
                } else if (!"id".equals(element)) {
                    String authorityPrefix = "";
                    AuthorityValue authorityValueType = authorityValueService.getAuthorityValueType(element);
                    if (authorityValueType != null) {
@@ -172,19 +200,24 @@ public class DSpaceCSV implements Serializable
                    }

                    // Check that the scheme exists
                    if (!StringUtils.equals(metadataSchema, MetadataSchemaEnum.RELATION.getName())) {
                        MetadataSchema foundSchema = metadataSchemaService.find(c, metadataSchema);
                        if (foundSchema == null) {
                            throw new MetadataImportInvalidHeadingException(clean[0],
                                                                            MetadataImportInvalidHeadingException
                                                                                .SCHEMA,
                                                                            columnCounter);
                        }

                        // Check that the metadata element exists in the schema
                        MetadataField foundField = metadataFieldService
                            .findByElement(c, foundSchema, metadataElement, metadataQualifier);
                        if (foundField == null) {
                            throw new MetadataImportInvalidHeadingException(clean[0],
                                                                            MetadataImportInvalidHeadingException
                                                                                .ELEMENT,
                                                                            columnCounter);
                        }
                    }

                    // Store the heading
@@ -196,8 +229,7 @@ public class DSpaceCSV implements Serializable
            StringBuilder lineBuilder = new StringBuilder();
            String lineRead;

            while ((lineRead = input.readLine()) != null) {
                if (lineBuilder.length() > 0) {
                    // Already have a previously read value - add this line
                    lineBuilder.append("\n").append(lineRead);
@@ -236,11 +268,8 @@ public class DSpaceCSV implements Serializable
                    addItem(lineRead);
                }
            }
        } finally {
            if (input != null) {
                input.close();
            }
        }
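A minimal sketch of the new InputStream-based constructor in use follows, assuming a classpath resource named /sample-items.csv (hypothetical) and an existing Context; the helper class is illustrative only.

package org.dspace.app.bulkedit;

import java.io.InputStream;

import org.dspace.core.Context;

// Illustrative sketch only: read a CSV bundled on the classpath now that the
// constructor accepts any InputStream rather than a File.
public class DSpaceCSVReadSketch {
    static DSpaceCSV readBundledCsv(Context context) throws Exception {
        try (InputStream in = DSpaceCSVReadSketch.class
                                  .getResourceAsStream("/sample-items.csv")) { // hypothetical resource
            return new DSpaceCSV(in, context);
        }
    }
}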
@@ -249,8 +278,7 @@ public class DSpaceCSV implements Serializable
    /**
     * Initialise this class with values from dspace.cfg
     */
    protected void init() {
        // Set the value separator
        setValueSeparator();
@@ -273,13 +301,16 @@ public class DSpaceCSV implements Serializable
        ignore = new HashMap<>();

        // Specify default values
        String[] defaultValues =
            new String[] {
                "dc.date.accessioned, dc.date.available, dc.date.updated, dc.description.provenance"
            };
        String[] toIgnoreArray =
            DSpaceServicesFactory.getInstance()
                                 .getConfigurationService()
                                 .getArrayProperty("bulkedit.ignore-on-export", defaultValues);
        for (String toIgnoreString : toIgnoreArray) {
            if (!"".equals(toIgnoreString.trim())) {
                ignore.put(toIgnoreString.trim(), toIgnoreString.trim());
            }
        }
@@ -307,16 +338,13 @@ public class DSpaceCSV implements Serializable
     *
     * If not set, defaults to double pipe '||'
     */
    private void setValueSeparator() {
        // Get the value separator
        valueSeparator = DSpaceServicesFactory.getInstance().getConfigurationService()
                                              .getProperty("bulkedit.valueseparator");
        if ((valueSeparator != null) && (!"".equals(valueSeparator.trim()))) {
            valueSeparator = valueSeparator.trim();
        } else {
            valueSeparator = "||";
        }
@@ -336,32 +364,22 @@ public class DSpaceCSV implements Serializable
     * Special values are 'tab', 'hash' and 'semicolon' which will
     * get substituted from the text to the value.
     */
    private void setFieldSeparator() {
        // Get the value separator
        fieldSeparator = DSpaceServicesFactory.getInstance().getConfigurationService()
                                              .getProperty("bulkedit.fieldseparator");
        if ((fieldSeparator != null) && (!"".equals(fieldSeparator.trim()))) {
            fieldSeparator = fieldSeparator.trim();
            if ("tab".equals(fieldSeparator)) {
                fieldSeparator = "\t";
            } else if ("semicolon".equals(fieldSeparator)) {
                fieldSeparator = ";";
            } else if ("hash".equals(fieldSeparator)) {
                fieldSeparator = "#";
            } else {
                fieldSeparator = fieldSeparator.trim();
            }
        } else {
            fieldSeparator = ",";
        }
@@ -371,23 +389,20 @@ public class DSpaceCSV implements Serializable
        escapedFieldSeparator = match.replaceAll("\\\\$1");
    }

    /**
     * Set the authority separator for value with authority data.
     *
     * Is set in dspace.cfg as bulkedit.authorityseparator
     *
     * If not set, defaults to double colon '::'
     */
    private void setAuthoritySeparator() {
        // Get the value separator
        authoritySeparator = DSpaceServicesFactory.getInstance().getConfigurationService()
                                                  .getProperty("bulkedit.authorityseparator");
        if ((authoritySeparator != null) && (!"".equals(authoritySeparator.trim()))) {
            authoritySeparator = authoritySeparator.trim();
        } else {
            authoritySeparator = "::";
        }
@@ -401,11 +416,9 @@ public class DSpaceCSV implements Serializable
     * Add a DSpace item to the CSV file
     *
     * @param i The DSpace item
     * @throws Exception if something goes wrong with adding the Item
     */
    public final void addItem(Item i) throws Exception {
        // If the item does not have an "owningCollection" the the below "getHandle()" call will fail
        // This should not happen but is here for safety.
        if (i.getOwningCollection() == null) {
@@ -421,49 +434,42 @@ public class DSpaceCSV implements Serializable
        // Add in any mapped collections
        List<Collection> collections = i.getCollections();
        for (Collection c : collections) {
            // Only add if it is not the owning collection
            if (!c.getHandle().equals(owningCollectionHandle)) {
                line.add("collection", c.getHandle());
            }
        }

        // Populate it
        List<MetadataValue> md = itemService.getMetadata(i, Item.ANY, Item.ANY, Item.ANY, Item.ANY);
        for (MetadataValue value : md) {
            MetadataField metadataField = value.getMetadataField();
            MetadataSchema metadataSchema = metadataField.getMetadataSchema();
            // Get the key (schema.element)
            String key = metadataSchema.getName() + "." + metadataField.getElement();

            // Add the qualifier if there is one (schema.element.qualifier)
            if (metadataField.getQualifier() != null) {
                key = key + "." + metadataField.getQualifier();
            }

            // Add the language if there is one (schema.element.qualifier[langauge])
            //if ((value.language != null) && (!"".equals(value.language)))
            if (value.getLanguage() != null) {
                key = key + "[" + value.getLanguage() + "]";
            }

            // Store the item
            if (exportAll || okToExport(metadataField)) {
                // Add authority and confidence if authority is not null
                String mdValue = value.getValue();
                if (value.getAuthority() != null && !"".equals(value.getAuthority())) {
                    mdValue += authoritySeparator + value.getAuthority() + authoritySeparator + (value
                        .getConfidence() != -1 ? value.getConfidence() : Choices.CF_ACCEPTED);
                }
                line.add(key, mdValue);
                if (!headings.contains(key)) {
                    headings.add(key);
                }
            }
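And the export direction, sketched under the assumption that the caller already has the Items it wants to serialise; only the public DSpaceCSV API touched by this diff is used, and the wrapper class name is hypothetical.

package org.dspace.app.bulkedit;

import java.util.List;

import org.dspace.content.Item;

// Illustrative sketch only: serialise a list of Items to CSV lines.
public class DSpaceCSVWriteSketch {
    static String[] toCsvLines(List<Item> items) throws Exception {
        DSpaceCSV csv = new DSpaceCSV(false);   // false: respect bulkedit.ignore-on-export
        for (Item item : items) {
            csv.addItem(item);                  // throws if an item has no owning collection
        }
        return csv.getCSVLinesAsStringArray();
    }
}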
@@ -478,12 +484,10 @@ public class DSpaceCSV implements Serializable
     * @param line The line of elements
     * @throws Exception Thrown if an error occurs when adding the item
     */
    public final void addItem(String line) throws Exception {
        // Check to see if the last character is a field separator, which hides the last empty column
        boolean last = false;
        if (line.endsWith(fieldSeparator)) {
            // Add a space to the end, then remove it later
            last = true;
            line += " ";
@@ -496,15 +500,12 @@ public class DSpaceCSV implements Serializable
        // Merge parts with embedded separators
        boolean alldone = false;
        while (!alldone) {
            boolean found = false;
            int i = 0;
            for (String part : bits) {
                int bitcounter = part.length() - part.replaceAll("\"", "").length();
                if ((part.startsWith("\"")) && ((!part.endsWith("\"")) || ((bitcounter & 1) == 1))) {
                    found = true;
                    String add = bits.get(i) + fieldSeparator + bits.get(i + 1);
                    bits.remove(i);
@@ -519,10 +520,8 @@ public class DSpaceCSV implements Serializable
        // Deal with quotes around the elements
        int i = 0;
        for (String part : bits) {
            if ((part.startsWith("\"")) && (part.endsWith("\""))) {
                part = part.substring(1, part.length() - 1);
                bits.set(i, part);
            }
@@ -531,10 +530,8 @@ public class DSpaceCSV implements Serializable
// Remove embedded quotes // Remove embedded quotes
i = 0; i = 0;
for (String part : bits) for (String part : bits) {
{ if (part.contains("\"\"")) {
if (part.contains("\"\""))
{
part = part.replaceAll("\"\"", "\""); part = part.replaceAll("\"\"", "\"");
bits.set(i, part); bits.set(i, part);
} }
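A small illustration (not part of this changeset) of what the merge and unquote loops above mean for one raw input line. The "," field separator is assumed here; it is configurable in DSpaceCSV.
class CsvQuotingSketch {
    // Hypothetical raw line handed to addItem(): a field containing the field
    // separator must be quoted, and an embedded double quote is written as "".
    static void example() {
        String raw = "+" + "," + "\"Smith, John\"" + "," + "\"He said \"\"hi\"\"\"";
        // After the loops above the parts are:
        //   +                  (first column, the item id; '+' marks a new item)
        //   Smith, John        (kept as one part despite the embedded comma)
        //   He said "hi"       (surrounding quotes stripped, "" collapsed to ")
    }
}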
@@ -546,34 +543,25 @@ public class DSpaceCSV implements Serializable
DSpaceCSVLine csvLine; DSpaceCSVLine csvLine;
// Is this an existing item, or a new item (where id = '+') // Is this an existing item, or a new item (where id = '+')
if ("+".equals(id)) if ("+".equals(id)) {
{
csvLine = new DSpaceCSVLine(); csvLine = new DSpaceCSVLine();
} } else {
else try {
{
try
{
csvLine = new DSpaceCSVLine(UUID.fromString(id)); csvLine = new DSpaceCSVLine(UUID.fromString(id));
} } catch (NumberFormatException nfe) {
catch (NumberFormatException nfe)
{
System.err.println("Invalid item identifier: " + id); System.err.println("Invalid item identifier: " + id);
System.err.println("Please check your CSV file for information. " + System.err.println("Please check your CSV file for information. " +
"Item id must be numeric, or a '+' to add a new item"); "Item id must be numeric, or a '+' to add a new item");
throw(nfe); throw (nfe);
} }
} }
// Add the rest of the parts // Add the rest of the parts
i = 0; i = 0;
for (String part : bits) for (String part : bits) {
{ if (i > 0) {
if (i > 0)
{
// Is this a last empty item? // Is this a last empty item?
if ((last) && (i == headings.size())) if ((last) && (i == headings.size())) {
{
part = ""; part = "";
} }
@@ -585,10 +573,8 @@ public class DSpaceCSV implements Serializable
} }
csvLine.add(headings.get(i - 1), null); csvLine.add(headings.get(i - 1), null);
String[] elements = part.split(escapedValueSeparator); String[] elements = part.split(escapedValueSeparator);
for (String element : elements) for (String element : elements) {
{ if ((element != null) && (!"".equals(element))) {
if ((element != null) && (!"".equals(element)))
{
csvLine.add(headings.get(i - 1), element); csvLine.add(headings.get(i - 1), element);
} }
} }
@@ -604,8 +590,7 @@ public class DSpaceCSV implements Serializable
* *
* @return The lines * @return The lines
*/ */
public final List<DSpaceCSVLine> getCSVLines() public final List<DSpaceCSVLine> getCSVLines() {
{
// Return the lines // Return the lines
return lines; return lines;
} }
@@ -615,22 +600,19 @@ public class DSpaceCSV implements Serializable
* *
* @return the array of CSV formatted Strings * @return the array of CSV formatted Strings
*/ */
public final String[] getCSVLinesAsStringArray() public final String[] getCSVLinesAsStringArray() {
{
// Create the headings line // Create the headings line
String[] csvLines = new String[counter + 1]; String[] csvLines = new String[counter + 1];
csvLines[0] = "id" + fieldSeparator + "collection"; csvLines[0] = "id" + fieldSeparator + "collection";
List<String> headingsCopy = new ArrayList<>(headings); List<String> headingsCopy = new ArrayList<>(headings);
Collections.sort(headingsCopy); Collections.sort(headingsCopy);
for (String value : headingsCopy) for (String value : headingsCopy) {
{
csvLines[0] = csvLines[0] + fieldSeparator + value; csvLines[0] = csvLines[0] + fieldSeparator + value;
} }
Iterator<DSpaceCSVLine> i = lines.iterator(); Iterator<DSpaceCSVLine> i = lines.iterator();
int c = 1; int c = 1;
while (i.hasNext()) while (i.hasNext()) {
{
csvLines[c++] = i.next().toCSV(headingsCopy, fieldSeparator, valueSeparator); csvLines[c++] = i.next().toCSV(headingsCopy, fieldSeparator, valueSeparator);
} }
@@ -638,23 +620,15 @@ public class DSpaceCSV implements Serializable
} }
/** /**
* Save the CSV file to the given filename * Creates and returns an InputStream from the CSV Lines in this DSpaceCSV
* * @return The InputStream created from the CSVLines in this DSpaceCSV
* @param filename The filename to save the CSV file to
*
* @throws IOException Thrown if an error occurs when writing the file
*/ */
public final void save(String filename) throws IOException public InputStream getInputStream() {
{ StringBuilder stringBuilder = new StringBuilder();
// Save the file
BufferedWriter out = new BufferedWriter(
new OutputStreamWriter(
new FileOutputStream(filename), "UTF-8"));
for (String csvLine : getCSVLinesAsStringArray()) { for (String csvLine : getCSVLinesAsStringArray()) {
out.write(csvLine + "\n"); stringBuilder.append(csvLine + "\n");
} }
out.flush(); return IOUtils.toInputStream(stringBuilder.toString(), StandardCharsets.UTF_8);
out.close();
} }
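Because save(filename) is replaced by getInputStream(), a caller now streams the CSV itself. A minimal sketch of such a caller, using only methods shown in this diff; the class name and target path are placeholders.
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import org.dspace.app.bulkedit.DSpaceCSV;
class CsvStreamWriterSketch {
    // Hypothetical stand-in for the removed DSpaceCSV.save(filename).
    static void writeToDisk(DSpaceCSV csv, String filename) throws IOException {
        try (InputStream in = csv.getInputStream()) {
            Files.copy(in, Paths.get(filename), StandardCopyOption.REPLACE_EXISTING);
        }
    }
}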
/** /**
@@ -666,12 +640,10 @@ public class DSpaceCSV implements Serializable
* @param md The Metadatum to examine * @param md The Metadatum to examine
* @return Whether or not it is OK to export this element * @return Whether or not it is OK to export this element
*/ */
protected boolean okToExport(MetadataField md) protected boolean okToExport(MetadataField md) {
{
// Now compare with the list to ignore // Now compare with the list to ignore
String key = md.getMetadataSchema().getName() + "." + md.getElement(); String key = md.getMetadataSchema().getName() + "." + md.getElement();
if (md.getQualifier() != null) if (md.getQualifier() != null) {
{
key += "." + md.getQualifier(); key += "." + md.getQualifier();
} }
if (ignore.get(key) != null) { if (ignore.get(key) != null) {
@@ -687,8 +659,7 @@ public class DSpaceCSV implements Serializable
* *
* @return The headings * @return The headings
*/ */
public List<String> getHeadings() public List<String> getHeadings() {
{
return headings; return headings;
} }
@@ -698,13 +669,11 @@ public class DSpaceCSV implements Serializable
* @return The formatted String as a csv * @return The formatted String as a csv
*/ */
@Override @Override
public final String toString() public final String toString() {
{
// Return the csv as one long string // Return the csv as one long string
StringBuilder csvLines = new StringBuilder(); StringBuilder csvLines = new StringBuilder();
String[] lines = this.getCSVLinesAsStringArray(); String[] lines = this.getCSVLinesAsStringArray();
for (String line : lines) for (String line : lines) {
{
csvLines.append(line).append("\n"); csvLines.append(line).append("\n");
} }
return csvLines.toString(); return csvLines.toString();
View File
@@ -7,30 +7,41 @@
*/ */
package org.dspace.app.bulkedit; package org.dspace.app.bulkedit;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.UUID;
import org.dspace.authority.AuthorityValue; import org.dspace.authority.AuthorityValue;
import org.dspace.authority.factory.AuthorityServiceFactory; import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityValueService; import org.dspace.authority.service.AuthorityValueService;
import java.io.Serializable;
import java.util.*;
/** /**
* Utility class to store a line from a CSV file * Utility class to store a line from a CSV file
* *
* @author Stuart Lewis * @author Stuart Lewis
*/ */
public class DSpaceCSVLine implements Serializable public class DSpaceCSVLine implements Serializable {
{ /**
/** The item id of the item represented by this line. -1 is for a new item */ * The item id of the item represented by this line. -1 is for a new item
*/
private final UUID id; private final UUID id;
/** The elements in this line in a hashtable, keyed by the metadata type */ /**
* The elements in this line in a hashtable, keyed by the metadata type
*/
private final Map<String, ArrayList> items; private final Map<String, ArrayList> items;
protected transient final AuthorityValueService authorityValueService protected transient final AuthorityValueService authorityValueService
= AuthorityServiceFactory.getInstance().getAuthorityValueService(); = AuthorityServiceFactory.getInstance().getAuthorityValueService();
/** ensuring that the order-sensible columns of the csv are processed in the correct order */ /**
* ensuring that the order-sensible columns of the csv are processed in the correct order
*/
private transient final Comparator<? super String> headerComparator = new Comparator<String>() { private transient final Comparator<? super String> headerComparator = new Comparator<String>() {
@Override @Override
public int compare(String md1, String md2) { public int compare(String md1, String md2) {
@@ -41,8 +52,7 @@ public class DSpaceCSVLine implements Serializable
int compare; int compare;
if (source1 == null && source2 != null) { if (source1 == null && source2 != null) {
compare = -1; compare = -1;
} } else if (source1 != null && source2 == null) {
else if (source1 != null && source2 == null) {
compare = 1; compare = 1;
} else { } else {
// the order of the rest does not matter // the order of the rest does not matter
@@ -57,8 +67,7 @@ public class DSpaceCSVLine implements Serializable
* *
* @param itemId The item ID of the line * @param itemId The item ID of the line
*/ */
public DSpaceCSVLine(UUID itemId) public DSpaceCSVLine(UUID itemId) {
{
// Store the ID + separator, and initialise the hashtable // Store the ID + separator, and initialise the hashtable
this.id = itemId; this.id = itemId;
items = new TreeMap<>(headerComparator); items = new TreeMap<>(headerComparator);
@@ -68,8 +77,7 @@ public class DSpaceCSVLine implements Serializable
/** /**
* Create a new CSV line for a new item * Create a new CSV line for a new item
*/ */
public DSpaceCSVLine() public DSpaceCSVLine() {
{
// Set the ID to be null, and initialise the hashtable // Set the ID to be null, and initialise the hashtable
this.id = null; this.id = null;
this.items = new TreeMap<>(headerComparator); this.items = new TreeMap<>(headerComparator);
@@ -80,8 +88,7 @@ public class DSpaceCSVLine implements Serializable
* *
* @return The item ID * @return The item ID
*/ */
public UUID getID() public UUID getID() {
{
// Return the ID // Return the ID
return id; return id;
} }
@@ -89,20 +96,17 @@ public class DSpaceCSVLine implements Serializable
/** /**
* Add a new metadata value to this line * Add a new metadata value to this line
* *
* @param key The metadata key (e.g. dc.contributor.author) * @param key The metadata key (e.g. dc.contributor.author)
* @param value The metadata value * @param value The metadata value
*/ */
public void add(String key, String value) public void add(String key, String value) {
{
// Create the array list if we need to // Create the array list if we need to
if (items.get(key) == null) if (items.get(key) == null) {
{
items.put(key, new ArrayList<String>()); items.put(key, new ArrayList<String>());
} }
// Store the item if it is not null // Store the item if it is not null
if (value != null) if (value != null) {
{
items.get(key).add(value); items.get(key).add(value);
} }
} }
@@ -113,8 +117,7 @@ public class DSpaceCSVLine implements Serializable
* @param key The metadata key * @param key The metadata key
* @return All the elements that match * @return All the elements that match
*/ */
public List<String> get(String key) public List<String> get(String key) {
{
// Return any relevant values // Return any relevant values
return items.get(key); return items.get(key);
} }
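A short usage sketch (not part of this changeset) of the add/get API above for a brand-new item; the metadata keys and values are examples only.
import java.util.List;
import java.util.UUID;
import org.dspace.app.bulkedit.DSpaceCSVLine;
class CsvLineSketch {
    static void example() {
        DSpaceCSVLine line = new DSpaceCSVLine();                  // new item, no UUID yet
        line.add("dc.title", "An example title");
        line.add("dc.contributor.author", "Lewis, Stuart");
        line.add("dc.contributor.author", "Second, Author");       // repeated keys accumulate values
        List<String> authors = line.get("dc.contributor.author");  // -> two values
        UUID id = line.getID();                                    // -> null for a new item
    }
}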
@@ -124,12 +127,11 @@ public class DSpaceCSVLine implements Serializable
* *
* @return The action (may be blank, 'withdraw', 'reinstate' or 'delete') * @return The action (may be blank, 'withdraw', 'reinstate' or 'delete')
*/ */
public String getAction() public String getAction() {
{
if (items.containsKey("action")) { if (items.containsKey("action")) {
ArrayList actions = items.get("action"); ArrayList actions = items.get("action");
if (actions.size() > 0) { if (actions.size() > 0) {
return ((String)actions.get(0)).trim(); return ((String) actions.get(0)).trim();
} }
} }
return ""; return "";
@@ -140,8 +142,7 @@ public class DSpaceCSVLine implements Serializable
* *
* @return An enumeration of all the keys * @return An enumeration of all the keys
*/ */
public Set<String> keys() public Set<String> keys() {
{
// Return the keys // Return the keys
return items.keySet(); return items.keySet();
} }
@@ -149,26 +150,23 @@ public class DSpaceCSVLine implements Serializable
/** /**
* Write this line out as a CSV formatted string, in the order given by the headings provided * Write this line out as a CSV formatted string, in the order given by the headings provided
* *
* @param headings The headings which define the order the elements must be presented in * @param headings The headings which define the order the elements must be presented in
* @param fieldSeparator separator between metadata fields * @param fieldSeparator separator between metadata fields
* @param valueSeparator separator between metadata values (within a field) * @param valueSeparator separator between metadata values (within a field)
* @return The CSV formatted String * @return The CSV formatted String
*/ */
protected String toCSV(List<String> headings, String fieldSeparator, String valueSeparator) protected String toCSV(List<String> headings, String fieldSeparator, String valueSeparator) {
{
StringBuilder bits = new StringBuilder(); StringBuilder bits = new StringBuilder();
// Add the id // Add the id
bits.append("\"").append(id).append("\"").append(fieldSeparator); bits.append("\"").append(id).append("\"").append(fieldSeparator);
bits.append(valueToCSV(items.get("collection"),valueSeparator)); bits.append(valueToCSV(items.get("collection"), valueSeparator));
// Add the rest of the elements // Add the rest of the elements
for (String heading : headings) for (String heading : headings) {
{
bits.append(fieldSeparator); bits.append(fieldSeparator);
List<String> values = items.get(heading); List<String> values = items.get(heading);
if (values != null && !"collection".equals(heading)) if (values != null && !"collection".equals(heading)) {
{
bits.append(valueToCSV(values, valueSeparator)); bits.append(valueToCSV(values, valueSeparator));
} }
} }
@@ -179,33 +177,26 @@ public class DSpaceCSVLine implements Serializable
/** /**
* Internal method to create a CSV formatted String joining a given set of elements * Internal method to create a CSV formatted String joining a given set of elements
* *
* @param values The values to create the string from * @param values The values to create the string from
* @param valueSeparator value separator * @param valueSeparator value separator
* @return The line as a CSV formatted String * @return The line as a CSV formatted String
*/ */
protected String valueToCSV(List<String> values, String valueSeparator) protected String valueToCSV(List<String> values, String valueSeparator) {
{
// Check there is some content // Check there is some content
if (values == null) if (values == null) {
{
return ""; return "";
} }
// Get on with the work // Get on with the work
String s; String s;
if (values.size() == 1) if (values.size() == 1) {
{
s = values.get(0); s = values.get(0);
} } else {
else
{
// Concatenate any fields together // Concatenate any fields together
StringBuilder str = new StringBuilder(); StringBuilder str = new StringBuilder();
for (String value : values) for (String value : values) {
{ if (str.length() > 0) {
if (str.length() > 0)
{
str.append(valueSeparator); str.append(valueSeparator);
} }
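For reference (not part of this changeset), the row layout that toCSV() and valueToCSV() above produce. The "," field separator, "||" value separator, item UUID and collection handle are assumptions for illustration; only the quoting of the id and the joining of multiple values are taken from the code shown.
class CsvRowShapeSketch {
    // Illustrative only: one serialized row for a line with two authors.
    static final String EXAMPLE_ROW =
        "\"0e5c6588-2b0d-4b8e-9d0e-1f3c5a7b9d21\""                 // the id, wrapped in quotes
        + "," + "123456789/2"                                      // the collection column (placeholder handle)
        + "," + "Lewis, Stuart" + "||" + "Second, Author";         // values in one field joined by valueSeparator
}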
View File
@@ -7,295 +7,85 @@
*/ */
package org.dspace.app.bulkedit; package org.dspace.app.bulkedit;
import com.google.common.collect.Iterators;
import org.apache.commons.cli.*;
import org.dspace.content.*;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.factory.HandleServiceFactory;
import java.util.ArrayList;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.Iterator;
import java.util.List; import org.apache.commons.cli.ParseException;
import org.dspace.content.service.MetadataDSpaceCsvExportService;
import org.dspace.core.Context;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.utils.DSpace;
/** /**
* Metadata exporter to allow the batch export of metadata into a file * Metadata exporter to allow the batch export of metadata into a file
* *
* @author Stuart Lewis * @author Stuart Lewis
*/ */
public class MetadataExport public class MetadataExport extends DSpaceRunnable<MetadataExportScriptConfiguration> {
{
/** The items to export */
protected Iterator<Item> toExport;
protected ItemService itemService; private boolean help = false;
private String filename = null;
private String handle = null;
private boolean exportAllMetadata = false;
private boolean exportAllItems = false;
protected Context context; private static final String EXPORT_CSV = "exportCSV";
/** Whether to export all metadata, or just normally edited metadata */ private MetadataDSpaceCsvExportService metadataDSpaceCsvExportService = new DSpace().getServiceManager()
protected boolean exportAll; .getServicesByType(MetadataDSpaceCsvExportService.class).get(0);
protected MetadataExport() { private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
itemService = ContentServiceFactory.getInstance().getItemService();
@Override
public void internalRun() throws Exception {
if (help) {
handler.logInfo("\nfull export: metadata-export -f filename");
handler.logInfo("partial export: metadata-export -i handle -f filename");
printHelp();
return;
}
Context context = new Context();
context.turnOffAuthorisationSystem();
try {
context.setCurrentUser(ePersonService.find(context, this.getEpersonIdentifier()));
} catch (SQLException e) {
handler.handleException(e);
}
DSpaceCSV dSpaceCSV = metadataDSpaceCsvExportService
.handleExport(context, exportAllItems, exportAllMetadata, handle,
handler);
handler.writeFilestream(context, filename, dSpaceCSV.getInputStream(), EXPORT_CSV);
context.restoreAuthSystemState();
context.complete();
} }
/** @Override
* Set up a new metadata export public MetadataExportScriptConfiguration getScriptConfiguration() {
* return new DSpace().getServiceManager().getServiceByName("metadata-export",
* @param c The Context MetadataExportScriptConfiguration.class);
* @param toExport The ItemIterator of items to export
* @param exportAll whether to export all metadata or not (include handle, provenance etc)
*/
public MetadataExport(Context c, Iterator<Item> toExport, boolean exportAll)
{
itemService = ContentServiceFactory.getInstance().getItemService();
// Store the export settings
this.toExport = toExport;
this.exportAll = exportAll;
this.context = c;
} }
/** @Override
* Method to export a community (and sub-communities and collections) public void setup() throws ParseException {
*
* @param c The Context
* @param toExport The Community to export
* @param exportAll whether to export all metadata or not (include handle, provenance etc)
*/
public MetadataExport(Context c, Community toExport, boolean exportAll)
{
itemService = ContentServiceFactory.getInstance().getItemService();
try if (commandLine.hasOption('h')) {
{ help = true;
// Try to export the community return;
this.toExport = buildFromCommunity(c, toExport, 0);
this.exportAll = exportAll;
this.context = c;
}
catch (SQLException sqle)
{
// Something went wrong...
System.err.println("Error running exporter:");
sqle.printStackTrace(System.err);
System.exit(1);
}
}
/**
* Build an array list of item ids that are in a community (include sub-communities and collections)
*
* @param context DSpace context
* @param community The community to build from
* @param indent How many spaces to use when writing out the names of items added
* @return The list of item ids
* @throws SQLException if database error
*/
protected Iterator<Item> buildFromCommunity(Context context, Community community, int indent)
throws SQLException
{
// Add all the collections
List<Collection> collections = community.getCollections();
Iterator<Item> result = null;
for (Collection collection : collections)
{
for (int i = 0; i < indent; i++)
{
System.out.print(" ");
}
Iterator<Item> items = itemService.findByCollection(context, collection);
result = addItemsToResult(result,items);
}
// Add all the sub-communities
List<Community> communities = community.getSubcommunities();
for (Community subCommunity : communities)
{
for (int i = 0; i < indent; i++)
{
System.out.print(" ");
}
Iterator<Item> items = buildFromCommunity(context, subCommunity, indent + 1);
result = addItemsToResult(result,items);
}
return result;
}
private Iterator<Item> addItemsToResult(Iterator<Item> result, Iterator<Item> items) {
if(result == null)
{
result = items;
}else{
result = Iterators.concat(result, items);
}
return result;
}
/**
* Run the export
*
* @return the exported CSV lines
*/
public DSpaceCSV export()
{
try
{
Context.Mode originalMode = context.getCurrentMode();
context.setMode(Context.Mode.READ_ONLY);
// Process each item
DSpaceCSV csv = new DSpaceCSV(exportAll);
while (toExport.hasNext())
{
Item item = toExport.next();
csv.addItem(item);
context.uncacheEntity(item);
}
context.setMode(originalMode);
// Return the results
return csv;
}
catch (Exception e)
{
// Something went wrong...
System.err.println("Error exporting to CSV:");
e.printStackTrace();
return null;
}
}
/**
* Print the help message
*
* @param options The command line options the user gave
* @param exitCode the system exit code to use
*/
private static void printHelp(Options options, int exitCode)
{
// print the help message
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("MetadataExport\n", options);
System.out.println("\nfull export: metadataexport -f filename");
System.out.println("partial export: metadataexport -i handle -f filename");
System.exit(exitCode);
}
/**
* main method to run the metadata exporter
*
* @param argv the command line arguments given
* @throws Exception if error occurs
*/
public static void main(String[] argv) throws Exception
{
// Create an options object and populate it
CommandLineParser parser = new PosixParser();
Options options = new Options();
options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)");
options.addOption("f", "file", true, "destination where you want file written");
options.addOption("a", "all", false, "include all metadata fields that are not normally changed (e.g. provenance)");
options.addOption("h", "help", false, "help");
CommandLine line = null;
try
{
line = parser.parse(options, argv);
}
catch (ParseException pe)
{
System.err.println("Error with commands.");
printHelp(options, 1);
System.exit(0);
}
if (line.hasOption('h'))
{
printHelp(options, 0);
} }
// Check a filename is given // Check a filename is given
if (!line.hasOption('f')) if (!commandLine.hasOption('f')) {
{ throw new ParseException("Required parameter -f missing!");
System.err.println("Required parameter -f missing!");
printHelp(options, 1);
} }
String filename = line.getOptionValue('f'); filename = commandLine.getOptionValue('f');
// Create a context exportAllMetadata = commandLine.hasOption('a');
Context c = new Context(Context.Mode.READ_ONLY);
c.turnOffAuthorisationSystem();
// The things we'll export if (!commandLine.hasOption('i')) {
Iterator<Item> toExport = null; exportAllItems = true;
MetadataExport exporter = null;
// Export everything?
boolean exportAll = line.hasOption('a');
ContentServiceFactory contentServiceFactory = ContentServiceFactory.getInstance();
// Check we have an item OK
ItemService itemService = contentServiceFactory.getItemService();
if (!line.hasOption('i'))
{
System.out.println("Exporting whole repository WARNING: May take some time!");
exporter = new MetadataExport(c, itemService.findAll(c), exportAll);
} }
else handle = commandLine.getOptionValue('i');
{
String handle = line.getOptionValue('i');
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(c, handle);
if (dso == null)
{
System.err.println("Item '" + handle + "' does not resolve to an item in your repository!");
printHelp(options, 1);
}
if (dso.getType() == Constants.ITEM)
{
System.out.println("Exporting item '" + dso.getName() + "' (" + handle + ")");
List<Item> item = new ArrayList<>();
item.add((Item) dso);
exporter = new MetadataExport(c, item.iterator(), exportAll);
}
else if (dso.getType() == Constants.COLLECTION)
{
System.out.println("Exporting collection '" + dso.getName() + "' (" + handle + ")");
Collection collection = (Collection)dso;
toExport = itemService.findByCollection(c, collection);
exporter = new MetadataExport(c, toExport, exportAll);
}
else if (dso.getType() == Constants.COMMUNITY)
{
System.out.println("Exporting community '" + dso.getName() + "' (" + handle + ")");
exporter = new MetadataExport(c, (Community)dso, exportAll);
}
else
{
System.err.println("Error identifying '" + handle + "'");
System.exit(1);
}
}
// Perform the export
DSpaceCSV csv = exporter.export();
// Save the files to the file
csv.save(filename);
// Finish off and tidy up
c.restoreAuthSystemState();
c.complete();
} }
} }
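A hedged sketch (not in the diff) of how the configuration wired above can be looked up outside the runnable itself. The bean name "metadata-export" is taken from getScriptConfiguration(); the class name and method are illustrative.
import org.dspace.app.bulkedit.MetadataExportScriptConfiguration;
import org.dspace.core.Context;
import org.dspace.utils.DSpace;
class ExportConfigLookupSketch {
    static boolean canRun(Context context) {
        MetadataExportScriptConfiguration config = new DSpace().getServiceManager()
                .getServiceByName("metadata-export", MetadataExportScriptConfiguration.class);
        return config != null && config.isAllowedToExecute(context);  // admin-only, per the configuration class
    }
}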
View File
@@ -0,0 +1,74 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import java.io.OutputStream;
import java.sql.SQLException;
import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;
/**
* The {@link ScriptConfiguration} for the {@link MetadataExport} script
*/
public class MetadataExportScriptConfiguration<T extends MetadataExport> extends ScriptConfiguration<T> {
@Autowired
private AuthorizeService authorizeService;
private Class<T> dspaceRunnableClass;
@Override
public Class<T> getDspaceRunnableClass() {
return dspaceRunnableClass;
}
/**
* Generic setter for the dspaceRunnableClass
* @param dspaceRunnableClass The dspaceRunnableClass to be set on this MetadataExportScriptConfiguration
*/
@Override
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
this.dspaceRunnableClass = dspaceRunnableClass;
}
@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}
@Override
public Options getOptions() {
if (options == null) {
Options options = new Options();
options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)");
options.getOption("i").setType(String.class);
options.addOption("f", "file", true, "destination where you want file written");
options.getOption("f").setType(OutputStream.class);
options.getOption("f").setRequired(true);
options.addOption("a", "all", false,
"include all metadata fields that are not normally changed (e.g. provenance)");
options.getOption("a").setType(boolean.class);
options.addOption("h", "help", false, "help");
options.getOption("h").setType(boolean.class);
super.options = options;
}
return options;
}
}
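The options built above are ordinary commons-cli Options, so their behaviour can be illustrated with a stand-alone parse. In DSpace the script framework drives the real parsing; the handle and file name below are placeholders.
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.ParseException;
import org.dspace.app.bulkedit.MetadataExportScriptConfiguration;
class ExportOptionsParseSketch {
    static void example(MetadataExportScriptConfiguration<?> config) throws ParseException {
        CommandLine cl = new DefaultParser().parse(config.getOptions(),
                new String[] {"-i", "123456789/7", "-f", "export.csv", "-a"});
        cl.getOptionValue('i');   // "123456789/7"
        cl.getOptionValue('f');   // "export.csv" (-f is required)
        cl.hasOption('a');        // true -> include all metadata fields
    }
}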
View File
@@ -0,0 +1,33 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import org.dspace.scripts.handler.DSpaceRunnableHandler;
/**
* CLI variant for the {@link MetadataImport} class
* This exists so that the behaviour of the determineChange method can be tailored to the CLI
*/
public class MetadataImportCLI extends MetadataImport {
@Override
protected boolean determineChange(DSpaceRunnableHandler handler) throws IOException {
handler.logInfo("Do you want to make these changes? [y/n] ");
try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(System.in))) {
String yn = bufferedReader.readLine();
if ("y".equalsIgnoreCase(yn)) {
return true;
}
return false;
}
}
}
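A hypothetical variant (not part of this changeset) showing why determineChange is the extension point: an unattended run could override it and skip the prompt entirely.
import org.dspace.app.bulkedit.MetadataImport;
import org.dspace.scripts.handler.DSpaceRunnableHandler;
class NonInteractiveMetadataImportSketch extends MetadataImport {
    // Hypothetical: always confirm, e.g. for automated batch runs.
    @Override
    protected boolean determineChange(DSpaceRunnableHandler handler) {
        handler.logInfo("Applying changes without prompting");
        return true;
    }
}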
View File
@@ -0,0 +1,16 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
* The {@link ScriptConfiguration} for the {@link org.dspace.app.bulkedit.MetadataImportCLI} CLI script
*/
public class MetadataImportCliScriptConfiguration extends MetadataImportScriptConfiguration<MetadataImportCLI> {
}
View File
@@ -12,26 +12,23 @@ package org.dspace.app.bulkedit;
* *
* @author Stuart Lewis * @author Stuart Lewis
*/ */
public class MetadataImportException extends Exception public class MetadataImportException extends Exception {
{
/** /**
* Instantiate a new MetadataImportException * Instantiate a new MetadataImportException
* *
* @param message the error message * @param message the error message
*/ */
public MetadataImportException(String message) public MetadataImportException(String message) {
{ super(message);
super(message);
} }
/** /**
* Instantiate a new MetadataImportException * Instantiate a new MetadataImportException
* *
* @param message the error message * @param message the error message
* @param exception the root cause * @param exception the root cause
*/ */
public MetadataImportException(String message, Exception exception) public MetadataImportException(String message, Exception exception) {
{ super(message, exception);
super(message, exception);
} }
} }
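A brief illustration (not from the diff) of when each constructor above is used; the empty-CSV message and the SQL cause are examples only.
import java.sql.SQLException;
import org.dspace.app.bulkedit.MetadataImportException;
class ImportErrorSketch {
    static void example(boolean csvEmpty, SQLException cause) throws MetadataImportException {
        if (csvEmpty) {
            throw new MetadataImportException("The CSV contains no rows to import");
        }
        if (cause != null) {
            throw new MetadataImportException("Unable to apply metadata changes", cause);
        }
    }
}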
View File
@@ -12,39 +12,51 @@ package org.dspace.app.bulkedit;
* *
* @author Stuart Lewis * @author Stuart Lewis
*/ */
public class MetadataImportInvalidHeadingException extends Exception public class MetadataImportInvalidHeadingException extends Exception {
{ /**
/** The type of error (schema or element) */ * The type of error (schema or element)
*/
private int type; private int type;
/** The bad heading */ /**
* The bad heading
*/
private String badHeading; private String badHeading;
/** The column number */ /**
* The column number
*/
private int column; private int column;
/** Error with the schema */ /**
* Error with the schema
*/
public static final int SCHEMA = 0; public static final int SCHEMA = 0;
/** Error with the element */ /**
* Error with the element
*/
public static final int ELEMENT = 1; public static final int ELEMENT = 1;
/** Error with a missing header */ /**
* Error with a missing header
*/
public static final int MISSING = 98; public static final int MISSING = 98;
/** Error with the whole entry */ /**
* Error with the whole entry
*/
public static final int ENTRY = 99; public static final int ENTRY = 99;
/** /**
* Instantiate a new MetadataImportInvalidHeadingException * Instantiate a new MetadataImportInvalidHeadingException
* *
* @param message the error message * @param message the error message
* @param theType the type of the error * @param theType the type of the error
* @param theColumn column number * @param theColumn column number
*/ */
public MetadataImportInvalidHeadingException(String message, int theType, int theColumn) public MetadataImportInvalidHeadingException(String message, int theType, int theColumn) {
{
super(message); super(message);
badHeading = message; badHeading = message;
type = theType; type = theType;
@@ -54,10 +66,9 @@ public class MetadataImportInvalidHeadingException extends Exception
/** /**
* Get the type of the exception * Get the type of the exception
* *
* @return the type of the exception * @return the type of the exception
*/ */
public String getType() public String getType() {
{
return "" + type; return "" + type;
} }
@@ -66,8 +77,7 @@ public class MetadataImportInvalidHeadingException extends Exception
* *
* @return the invalid heading * @return the invalid heading
*/ */
public String getBadHeader() public String getBadHeader() {
{
return badHeading; return badHeading;
} }
@@ -76,8 +86,7 @@ public class MetadataImportInvalidHeadingException extends Exception
* *
* @return the invalid column number * @return the invalid column number
*/ */
public int getColumn() public int getColumn() {
{
return column; return column;
} }
@@ -87,19 +96,14 @@ public class MetadataImportInvalidHeadingException extends Exception
* @return The exception message * @return The exception message
*/ */
@Override @Override
public String getMessage() public String getMessage() {
{ if (type == SCHEMA) {
if (type == SCHEMA)
{
return "Unknown metadata schema in column " + column + ": " + badHeading; return "Unknown metadata schema in column " + column + ": " + badHeading;
} else if (type == ELEMENT) } else if (type == ELEMENT) {
{
return "Unknown metadata element in column " + column + ": " + badHeading; return "Unknown metadata element in column " + column + ": " + badHeading;
} else if (type == MISSING) } else if (type == MISSING) {
{
return "Row with missing header: column " + column; return "Row with missing header: column " + column;
} else } else {
{
return "Bad metadata declaration in column" + column + ": " + badHeading; return "Bad metadata declaration in column" + column + ": " + badHeading;
} }
} }
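For reference (not part of the diff), how the constants above surface in the message returned by getMessage(); the heading text and column number are examples.
import org.dspace.app.bulkedit.MetadataImportInvalidHeadingException;
class HeadingErrorSketch {
    static String example() {
        MetadataImportInvalidHeadingException e = new MetadataImportInvalidHeadingException(
                "dc.contributr.author",                                  // the misspelt heading
                MetadataImportInvalidHeadingException.ELEMENT, 4);
        return e.getMessage();   // "Unknown metadata element in column 4: dc.contributr.author"
    }
}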
View File
@@ -0,0 +1,84 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import java.io.InputStream;
import java.sql.SQLException;
import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;
/**
* The {@link ScriptConfiguration} for the {@link MetadataImport} script
*/
public class MetadataImportScriptConfiguration<T extends MetadataImport> extends ScriptConfiguration<T> {
@Autowired
private AuthorizeService authorizeService;
private Class<T> dspaceRunnableClass;
@Override
public Class<T> getDspaceRunnableClass() {
return dspaceRunnableClass;
}
/**
* Generic setter for the dspaceRunnableClass
* @param dspaceRunnableClass The dspaceRunnableClass to be set on this MetadataImportScriptConfiguration
*/
@Override
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
this.dspaceRunnableClass = dspaceRunnableClass;
}
@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}
@Override
public Options getOptions() {
if (options == null) {
Options options = new Options();
options.addOption("f", "file", true, "source file");
options.getOption("f").setType(InputStream.class);
options.getOption("f").setRequired(true);
options.addOption("e", "email", true, "email address or user id of user (required if adding new items)");
options.getOption("e").setType(String.class);
options.getOption("e").setRequired(true);
options.addOption("s", "silent", false,
"silent operation - doesn't request confirmation of changes USE WITH CAUTION");
options.getOption("s").setType(boolean.class);
options.addOption("w", "workflow", false, "workflow - when adding new items, use collection workflow");
options.getOption("w").setType(boolean.class);
options.addOption("n", "notify", false,
"notify - when adding new items using a workflow, send notification emails");
options.getOption("n").setType(boolean.class);
options.addOption("v", "validate-only", false,
"validate - just validate the csv, don't run the import");
options.getOption("v").setType(boolean.class);
options.addOption("t", "template", false,
"template - when adding new items, use the collection template (if it exists)");
options.getOption("t").setType(boolean.class);
options.addOption("h", "help", false, "help");
options.getOption("h").setType(boolean.class);
super.options = options;
}
return options;
}
}
View File
@@ -9,7 +9,11 @@ package org.dspace.app.checker;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.*; import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.CommandLineParser;
@@ -19,8 +23,17 @@ import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser; import org.apache.commons.cli.PosixParser;
import org.apache.log4j.Logger; import org.apache.logging.log4j.LogManager;
import org.dspace.checker.*; import org.apache.logging.log4j.Logger;
import org.dspace.checker.BitstreamDispatcher;
import org.dspace.checker.CheckerCommand;
import org.dspace.checker.HandleDispatcher;
import org.dspace.checker.IteratorDispatcher;
import org.dspace.checker.LimitedCountDispatcher;
import org.dspace.checker.LimitedDurationDispatcher;
import org.dspace.checker.ResultsLogger;
import org.dspace.checker.ResultsPruner;
import org.dspace.checker.SimpleDispatcher;
import org.dspace.content.Bitstream; import org.dspace.content.Bitstream;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamService; import org.dspace.content.service.BitstreamService;
@@ -35,42 +48,40 @@ import org.dspace.core.Utils;
* @author Grace Carpenter * @author Grace Carpenter
* @author Nathan Sarr * @author Nathan Sarr
*/ */
public final class ChecksumChecker public final class ChecksumChecker {
{ private static final Logger LOG = LogManager.getLogger(ChecksumChecker.class);
private static final Logger LOG = Logger.getLogger(ChecksumChecker.class);
private static final BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); private static final BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
/** /**
* Blanked-off constructor; this class should be used as a command-line * Blanked-off constructor; this class should be used as a command-line
* tool. * tool.
*
*/ */
private ChecksumChecker() private ChecksumChecker() {
{
} }
/** /**
* Command line access to the checksum package. * Command line access to the checksum package.
* *
* <dl> * <dl>
* <dt>-h</dt> * <dt>-h</dt>
* <dd>Print help on command line options</dd> * <dd>Print help on command line options</dd>
* <dt>-l</dt> * <dt>-l</dt>
* <dd>loop through bitstreams once</dd> * <dd>loop through bitstreams once</dd>
* <dt>-L</dt> * <dt>-L</dt>
* <dd>loop continuously through bitstreams</dd> * <dd>loop continuously through bitstreams</dd>
* <dt>-d</dt> * <dt>-d</dt>
* <dd>specify duration of process run</dd> * <dd>specify duration of process run</dd>
* <dt>-b</dt> * <dt>-b</dt>
* <dd>specify bitstream IDs</dd> * <dd>specify bitstream IDs</dd>
* <dt>-a [handle_id]</dt> * <dt>-a [handle_id]</dt>
* <dd>check anything by handle</dd> * <dd>check anything by handle</dd>
* <dt>-e</dt> * <dt>-e</dt>
* <dd>Report only errors in the logs</dd> * <dd>Report only errors in the logs</dd>
* <dt>-p</dt> * <dt>-p</dt>
* <dd>Don't prune results before running checker</dd> * <dd>Don't prune results before running checker</dd>
* </dl> * </dl>
*
* @param args the command line arguments given * @param args the command line arguments given
* @throws SQLException if error * @throws SQLException if error
*/ */
@@ -84,7 +95,7 @@ public final class ChecksumChecker
options.addOption("l", "looping", false, "Loop once through bitstreams"); options.addOption("l", "looping", false, "Loop once through bitstreams");
options.addOption("L", "continuous", false, options.addOption("L", "continuous", false,
"Loop continuously through bitstreams"); "Loop continuously through bitstreams");
options.addOption("h", "help", false, "Help"); options.addOption("h", "help", false, "Help");
options.addOption("d", "duration", true, "Checking duration"); options.addOption("d", "duration", true, "Checking duration");
options.addOption("c", "count", true, "Check count"); options.addOption("c", "count", true, "Check count");
@@ -99,25 +110,21 @@ public final class ChecksumChecker
options.addOption("p", "prune", false, "Prune configuration file"); options.addOption("p", "prune", false, "Prune configuration file");
options.addOption(OptionBuilder options.addOption(OptionBuilder
.withArgName("prune") .withArgName("prune")
.hasOptionalArgs(1) .hasOptionalArgs(1)
.withDescription( .withDescription(
"Prune old results (optionally using specified properties file for configuration)") "Prune old results (optionally using specified properties file for configuration)")
.create('p')); .create('p'));
try try {
{
line = parser.parse(options, args); line = parser.parse(options, args);
} } catch (ParseException e) {
catch (ParseException e)
{
LOG.fatal(e); LOG.fatal(e);
System.exit(1); System.exit(1);
} }
// user asks for help // user asks for help
if (line.hasOption('h')) if (line.hasOption('h')) {
{
printHelp(options); printHelp(options);
} }
Context context = null; Context context = null;
@@ -126,23 +133,19 @@ public final class ChecksumChecker
// Prune stage // Prune stage
if (line.hasOption('p')) if (line.hasOption('p')) {
{
ResultsPruner rp = null; ResultsPruner rp = null;
try try {
{
rp = (line.getOptionValue('p') != null) ? ResultsPruner rp = (line.getOptionValue('p') != null) ? ResultsPruner
.getPruner(context, line.getOptionValue('p')) : ResultsPruner .getPruner(context, line.getOptionValue('p')) : ResultsPruner
.getDefaultPruner(context); .getDefaultPruner(context);
} } catch (FileNotFoundException e) {
catch (FileNotFoundException e)
{
LOG.error("File not found", e); LOG.error("File not found", e);
System.exit(1); System.exit(1);
} }
int count = rp.prune(); int count = rp.prune();
System.out.println("Pruned " + count System.out.println("Pruned " + count
+ " old results from the database."); + " old results from the database.");
} }
Date processStart = Calendar.getInstance().getTime(); Date processStart = Calendar.getInstance().getTime();
@@ -151,77 +154,55 @@ public final class ChecksumChecker
// process should loop infinitely through // process should loop infinitely through
// most_recent_checksum table // most_recent_checksum table
if (line.hasOption('l')) if (line.hasOption('l')) {
{
dispatcher = new SimpleDispatcher(context, processStart, false); dispatcher = new SimpleDispatcher(context, processStart, false);
} } else if (line.hasOption('L')) {
else if (line.hasOption('L'))
{
dispatcher = new SimpleDispatcher(context, processStart, true); dispatcher = new SimpleDispatcher(context, processStart, true);
} } else if (line.hasOption('b')) {
else if (line.hasOption('b'))
{
// check only specified bitstream(s) // check only specified bitstream(s)
String[] ids = line.getOptionValues('b'); String[] ids = line.getOptionValues('b');
List<Bitstream> bitstreams = new ArrayList<>(ids.length); List<Bitstream> bitstreams = new ArrayList<>(ids.length);
for (int i = 0; i < ids.length; i++) for (int i = 0; i < ids.length; i++) {
{ try {
try
{
bitstreams.add(bitstreamService.find(context, UUID.fromString(ids[i]))); bitstreams.add(bitstreamService.find(context, UUID.fromString(ids[i])));
} } catch (NumberFormatException nfe) {
catch (NumberFormatException nfe)
{
System.err.println("The following argument: " + ids[i] System.err.println("The following argument: " + ids[i]
+ " is not an integer"); + " is not an integer");
System.exit(0); System.exit(0);
} }
} }
dispatcher = new IteratorDispatcher(bitstreams.iterator()); dispatcher = new IteratorDispatcher(bitstreams.iterator());
} } else if (line.hasOption('a')) {
else if (line.hasOption('a'))
{
dispatcher = new HandleDispatcher(context, line.getOptionValue('a')); dispatcher = new HandleDispatcher(context, line.getOptionValue('a'));
} } else if (line.hasOption('d')) {
else if (line.hasOption('d'))
{
// run checker process for specified duration // run checker process for specified duration
try try {
{
dispatcher = new LimitedDurationDispatcher( dispatcher = new LimitedDurationDispatcher(
new SimpleDispatcher(context, processStart, true), new Date( new SimpleDispatcher(context, processStart, true), new Date(
System.currentTimeMillis() System.currentTimeMillis()
+ Utils.parseDuration(line + Utils.parseDuration(line
.getOptionValue('d')))); .getOptionValue('d'))));
} } catch (Exception e) {
catch (Exception e)
{
LOG.fatal("Couldn't parse " + line.getOptionValue('d') LOG.fatal("Couldn't parse " + line.getOptionValue('d')
+ " as a duration: ", e); + " as a duration: ", e);
System.exit(0); System.exit(0);
} }
} } else if (line.hasOption('c')) {
else if (line.hasOption('c'))
{
int count = Integer.valueOf(line.getOptionValue('c')); int count = Integer.valueOf(line.getOptionValue('c'));
// run checker process for specified number of bitstreams // run checker process for specified number of bitstreams
dispatcher = new LimitedCountDispatcher(new SimpleDispatcher( dispatcher = new LimitedCountDispatcher(new SimpleDispatcher(
context, processStart, false), count); context, processStart, false), count);
} } else {
else
{
dispatcher = new LimitedCountDispatcher(new SimpleDispatcher( dispatcher = new LimitedCountDispatcher(new SimpleDispatcher(
context, processStart, false), 1); context, processStart, false), 1);
} }
ResultsLogger logger = new ResultsLogger(processStart); ResultsLogger logger = new ResultsLogger(processStart);
CheckerCommand checker = new CheckerCommand(context); CheckerCommand checker = new CheckerCommand(context);
// verbose reporting // verbose reporting
if (line.hasOption('v')) if (line.hasOption('v')) {
{
checker.setReportVerbose(true); checker.setReportVerbose(true);
} }
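A small sketch (not in the diff) of the -d handling above, assuming Utils.parseDuration returns the duration in milliseconds, as its use alongside System.currentTimeMillis() suggests; the "2h" value is an example.
import java.util.Date;
import org.dspace.core.Utils;
class DurationSketch {
    static Date example() throws Exception {
        long windowMillis = Utils.parseDuration("2h");                // accepts "30s", "30m", "2h" style values
        return new Date(System.currentTimeMillis() + windowMillis);   // end point handed to LimitedDurationDispatcher
    }
}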
@@ -243,18 +224,17 @@ public final class ChecksumChecker
* *
* @param options that are available for the user * @param options that are available for the user
*/ */
private static void printHelp(Options options) private static void printHelp(Options options) {
{
HelpFormatter myhelp = new HelpFormatter(); HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("Checksum Checker\n", options); myhelp.printHelp("Checksum Checker\n", options);
System.out.println("\nSpecify a duration for checker process, using s(seconds)," System.out.println("\nSpecify a duration for checker process, using s(seconds),"
+ "m(minutes), or h(hours): ChecksumChecker -d 30s" + "m(minutes), or h(hours): ChecksumChecker -d 30s"
+ " OR ChecksumChecker -d 30m" + " OR ChecksumChecker -d 30m"
+ " OR ChecksumChecker -d 2h"); + " OR ChecksumChecker -d 2h");
System.out.println("\nSpecify bitstream IDs: ChecksumChecker -b 13 15 17 20"); System.out.println("\nSpecify bitstream IDs: ChecksumChecker -b 13 15 17 20");
System.out.println("\nLoop once through all bitstreams: " System.out.println("\nLoop once through all bitstreams: "
+ "ChecksumChecker -l"); + "ChecksumChecker -l");
System.out.println("\nLoop continuously through all bitstreams: ChecksumChecker -L"); System.out.println("\nLoop continuously through all bitstreams: ChecksumChecker -L");
System.out.println("\nCheck a defined number of bitstreams: ChecksumChecker -c 10"); System.out.println("\nCheck a defined number of bitstreams: ChecksumChecker -c 10");
System.out.println("\nReport all processing (verbose)(default reports only errors): ChecksumChecker -v"); System.out.println("\nReport all processing (verbose)(default reports only errors): ChecksumChecker -v");
View File
@@ -7,12 +7,12 @@
*/ */
package org.dspace.app.configuration; package org.dspace.app.configuration;
import org.dspace.kernel.config.SpringLoader;
import org.dspace.services.ConfigurationService;
import java.io.File; import java.io.File;
import java.net.MalformedURLException; import java.net.MalformedURLException;
import org.dspace.kernel.config.SpringLoader;
import org.dspace.services.ConfigurationService;
/** /**
* @author Kevin Van de Velde (kevin at atmire dot com) * @author Kevin Van de Velde (kevin at atmire dot com)
*/ */
@@ -32,7 +32,7 @@ public class APISpringLoader implements SpringLoader {
try { try {
return new String[]{new File(filePath.toString()).toURI().toURL().toString() + XML_SUFFIX}; return new String[] {new File(filePath.toString()).toURI().toURL().toString() + XML_SUFFIX};
} catch (MalformedURLException e) { } catch (MalformedURLException e) {
return new String[0]; return new String[0];
} }
View File
@@ -7,7 +7,17 @@
*/ */
package org.dspace.app.harvest; package org.dspace.app.harvest;
import org.apache.commons.cli.*; import java.io.IOException;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObject;
@@ -27,27 +37,21 @@ import org.dspace.harvest.OAIHarvester;
import org.dspace.harvest.factory.HarvestServiceFactory; import org.dspace.harvest.factory.HarvestServiceFactory;
import org.dspace.harvest.service.HarvestedCollectionService; import org.dspace.harvest.service.HarvestedCollectionService;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
/** /**
* Test class for harvested collections. * Test class for harvested collections.
* *
* @author Alexey Maslov * @author Alexey Maslov
*/ */
public class Harvest public class Harvest {
{
private static Context context; private static Context context;
private static final HarvestedCollectionService harvestedCollectionService = HarvestServiceFactory.getInstance().getHarvestedCollectionService(); private static final HarvestedCollectionService harvestedCollectionService =
HarvestServiceFactory.getInstance().getHarvestedCollectionService();
private static final EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); private static final EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
private static final CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); private static final CollectionService collectionService =
ContentServiceFactory.getInstance().getCollectionService();
public static void main(String[] argv) throws Exception public static void main(String[] argv) throws Exception {
{
// create an options object and populate it // create an options object and populate it
CommandLineParser parser = new PosixParser(); CommandLineParser parser = new PosixParser();
@@ -64,17 +68,18 @@ public class Harvest
options.addOption("e", "eperson", true, options.addOption("e", "eperson", true,
"eperson"); "eperson");
options.addOption("c", "collection", true, options.addOption("c", "collection", true,
"harvesting collection (handle or id)"); "harvesting collection (handle or id)");
options.addOption("t", "type", true, options.addOption("t", "type", true,
"type of harvesting (0 for none)"); "type of harvesting (0 for none)");
options.addOption("a", "address", true, options.addOption("a", "address", true,
"address of the OAI-PMH server"); "address of the OAI-PMH server");
options.addOption("i", "oai_set_id", true, options.addOption("i", "oai_set_id", true,
"id of the PMH set representing the harvested collection"); "id of the PMH set representing the harvested collection");
options.addOption("m", "metadata_format", true, options.addOption("m", "metadata_format", true,
"the name of the desired metadata format for harvesting, resolved to namespace and crosswalk in dspace.cfg"); "the name of the desired metadata format for harvesting, resolved to namespace and " +
"crosswalk in dspace.cfg");
options.addOption("h", "help", false, "help"); options.addOption("h", "help", false, "help");
@@ -88,13 +93,16 @@ public class Harvest
String metadataKey = null; String metadataKey = null;
int harvestType = 0; int harvestType = 0;
if (line.hasOption('h')) if (line.hasOption('h')) {
{
HelpFormatter myhelp = new HelpFormatter(); HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("Harvest\n", options); myhelp.printHelp("Harvest\n", options);
System.out.println("\nPING OAI server: Harvest -g -a oai_source -i oai_set_id"); System.out.println("\nPING OAI server: Harvest -g -a oai_source -i oai_set_id");
System.out.println("RUNONCE harvest with arbitrary options: Harvest -o -e eperson -c collection -t harvest_type -a oai_source -i oai_set_id -m metadata_format"); System.out.println(
System.out.println("SETUP a collection for harvesting: Harvest -s -c collection -t harvest_type -a oai_source -i oai_set_id -m metadata_format"); "RUNONCE harvest with arbitrary options: Harvest -o -e eperson -c collection -t harvest_type -a " +
"oai_source -i oai_set_id -m metadata_format");
System.out.println(
"SETUP a collection for harvesting: Harvest -s -c collection -t harvest_type -a oai_source -i " +
"oai_set_id -m metadata_format");
System.out.println("RUN harvest once: Harvest -r -e eperson -c collection"); System.out.println("RUN harvest once: Harvest -r -e eperson -c collection");
System.out.println("START harvest scheduler: Harvest -S"); System.out.println("START harvest scheduler: Harvest -S");
System.out.println("RESET all harvest status: Harvest -R"); System.out.println("RESET all harvest status: Harvest -R");
@@ -102,7 +110,6 @@ public class Harvest
System.out.println("PURGE all harvestable collections: Harvest -P -e eperson"); System.out.println("PURGE all harvestable collections: Harvest -P -e eperson");
System.exit(0); System.exit(0);
} }
@@ -160,61 +167,49 @@ public class Harvest
// Check our options // Check our options
if (command == null) if (command == null) {
{
System.out System.out
.println("Error - no parameters specified (run with -h flag for details)"); .println("Error - no parameters specified (run with -h flag for details)");
System.exit(1); System.exit(1);
} } else if ("run".equals(command)) {
// Run a single harvest cycle on a collection using saved settings. // Run a single harvest cycle on a collection using saved settings.
else if ("run".equals(command)) if (collection == null || eperson == null) {
{
if (collection == null || eperson == null)
{
System.out System.out
.println("Error - a target collection and eperson must be provided"); .println("Error - a target collection and eperson must be provided");
System.out.println(" (run with -h flag for details)"); System.out.println(" (run with -h flag for details)");
System.exit(1); System.exit(1);
} }
harvester.runHarvest(collection, eperson); harvester.runHarvest(collection, eperson);
} } else if ("start".equals(command)) {
// start the harvest loop // start the harvest loop
else if ("start".equals(command))
{
startHarvester(); startHarvester();
} } else if ("reset".equals(command)) {
// reset harvesting status // reset harvesting status
else if ("reset".equals(command))
{
resetHarvesting(); resetHarvesting();
} } else if ("purgeAll".equals(command)) {
// purge all collections that are set up for harvesting (obviously for testing purposes only) // purge all collections that are set up for harvesting (obviously for testing purposes only)
else if ("purgeAll".equals(command)) if (eperson == null) {
{
if (eperson == null)
{
System.out System.out
.println("Error - an eperson must be provided"); .println("Error - an eperson must be provided");
System.out.println(" (run with -h flag for details)"); System.out.println(" (run with -h flag for details)");
System.exit(1); System.exit(1);
} }
List<HarvestedCollection> harvestedCollections = harvestedCollectionService.findAll(context); List<HarvestedCollection> harvestedCollections = harvestedCollectionService.findAll(context);
for (HarvestedCollection harvestedCollection : harvestedCollections) for (HarvestedCollection harvestedCollection : harvestedCollections) {
{ System.out.println(
System.out.println("Purging the following collections (deleting items and resetting harvest status): " + harvestedCollection.getCollection().getID().toString()); "Purging the following collections (deleting items and resetting harvest status): " +
harvestedCollection
.getCollection().getID().toString());
harvester.purgeCollection(harvestedCollection.getCollection().getID().toString(), eperson); harvester.purgeCollection(harvestedCollection.getCollection().getID().toString(), eperson);
} }
context.complete(); context.complete();
} } else if ("purge".equals(command)) {
// Delete all items in a collection. Useful for testing fresh harvests. // Delete all items in a collection. Useful for testing fresh harvests.
else if ("purge".equals(command)) if (collection == null || eperson == null) {
{
if (collection == null || eperson == null)
{
System.out System.out
.println("Error - a target collection and eperson must be provided"); .println("Error - a target collection and eperson must be provided");
System.out.println(" (run with -h flag for details)"); System.out.println(" (run with -h flag for details)");
System.exit(1); System.exit(1);
} }
@@ -223,35 +218,28 @@ public class Harvest
context.complete(); context.complete();
//TODO: implement this... remove all items and remember to unset "last-harvested" settings //TODO: implement this... remove all items and remember to unset "last-harvested" settings
} } else if ("config".equals(command)) {
// Configure a collection with the three main settings // Configure a collection with the three main settings
else if ("config".equals(command)) if (collection == null) {
{
if (collection == null)
{
System.out.println("Error - a target collection must be provided"); System.out.println("Error - a target collection must be provided");
System.out.println(" (run with -h flag for details)"); System.out.println(" (run with -h flag for details)");
System.exit(1); System.exit(1);
} }
if (oaiSource == null || oaiSetID == null) if (oaiSource == null || oaiSetID == null) {
{
System.out.println("Error - both the OAI server address and OAI set id must be specified"); System.out.println("Error - both the OAI server address and OAI set id must be specified");
System.out.println(" (run with -h flag for details)"); System.out.println(" (run with -h flag for details)");
System.exit(1); System.exit(1);
} }
if (metadataKey == null) if (metadataKey == null) {
{ System.out
System.out.println("Error - a metadata key (commonly the prefix) must be specified for this collection"); .println("Error - a metadata key (commonly the prefix) must be specified for this collection");
System.out.println(" (run with -h flag for details)"); System.out.println(" (run with -h flag for details)");
System.exit(1); System.exit(1);
} }
harvester.configureCollection(collection, harvestType, oaiSource, oaiSetID, metadataKey); harvester.configureCollection(collection, harvestType, oaiSource, oaiSetID, metadataKey);
} } else if ("ping".equals(command)) {
else if ("ping".equals(command)) if (oaiSource == null || oaiSetID == null) {
{
if (oaiSource == null || oaiSetID == null)
{
System.out.println("Error - both the OAI server address and OAI set id must be specified"); System.out.println("Error - both the OAI server address and OAI set id must be specified");
System.out.println(" (run with -h flag for details)"); System.out.println(" (run with -h flag for details)");
System.exit(1); System.exit(1);
@@ -272,39 +260,29 @@ public class Harvest
try { try {
// is the ID a handle? // is the ID a handle?
if (collectionID != null) if (collectionID != null) {
{ if (collectionID.indexOf('/') != -1) {
if (collectionID.indexOf('/') != -1)
{
// string has a / so it must be a handle - try and resolve it // string has a / so it must be a handle - try and resolve it
dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, collectionID); dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, collectionID);
// resolved, now make sure it's a collection // resolved, now make sure it's a collection
if (dso == null || dso.getType() != Constants.COLLECTION) if (dso == null || dso.getType() != Constants.COLLECTION) {
{
targetCollection = null; targetCollection = null;
} } else {
else
{
targetCollection = (Collection) dso; targetCollection = (Collection) dso;
} }
} } else {
// not a handle, try and treat it as an integer collection // not a handle, try and treat it as a collection database UUID
// database ID System.out.println("Looking up by UUID: " + collectionID + ", " + "in context: " + context);
else
{
System.out.println("Looking up by id: " + collectionID + ", parsed as '" + Integer.parseInt(collectionID) + "', " + "in context: " + context);
targetCollection = collectionService.find(context, UUID.fromString(collectionID)); targetCollection = collectionService.find(context, UUID.fromString(collectionID));
} }
} }
// was the collection valid? // was the collection valid?
if (targetCollection == null) if (targetCollection == null) {
{
System.out.println("Cannot resolve " + collectionID + " to collection"); System.out.println("Cannot resolve " + collectionID + " to collection");
System.exit(1); System.exit(1);
} }
} } catch (SQLException se) {
catch (SQLException se) {
se.printStackTrace(); se.printStackTrace();
} }
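The hunk above is where the old integer database-ID lookup becomes a UUID lookup. A self-contained sketch of the handle-versus-UUID branch, using the service factories shown in the diff (the class and method names of the sketch itself are illustrative):

    import java.sql.SQLException;
    import java.util.UUID;

    import org.dspace.content.Collection;
    import org.dspace.content.DSpaceObject;
    import org.dspace.content.factory.ContentServiceFactory;
    import org.dspace.core.Constants;
    import org.dspace.core.Context;
    import org.dspace.handle.factory.HandleServiceFactory;

    public class CollectionResolver {
        // Resolve an identifier that may be either a handle ("123456789/42") or a collection UUID.
        public static Collection resolve(Context context, String id) throws SQLException {
            if (id.indexOf('/') != -1) {
                // a '/' means it is a handle: resolve it, then confirm the object is a collection
                DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService()
                                                       .resolveToObject(context, id);
                return (dso != null && dso.getType() == Constants.COLLECTION) ? (Collection) dso : null;
            }
            // otherwise treat the string as a collection UUID
            return ContentServiceFactory.getInstance().getCollectionService().find(context, UUID.fromString(id));
        }
    }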
@@ -312,7 +290,8 @@ public class Harvest
} }
private void configureCollection(String collectionID, int type, String oaiSource, String oaiSetId, String mdConfigId) { private void configureCollection(String collectionID, int type, String oaiSource, String oaiSetId,
String mdConfigId) {
System.out.println("Running: configure collection"); System.out.println("Running: configure collection");
Collection collection = resolveCollection(collectionID); Collection collection = resolveCollection(collectionID);
@@ -330,15 +309,12 @@ public class Harvest
harvestedCollectionService.update(context, hc); harvestedCollectionService.update(context, hc);
context.restoreAuthSystemState(); context.restoreAuthSystemState();
context.complete(); context.complete();
} } catch (Exception e) {
catch (Exception e) {
System.out.println("Changes could not be committed"); System.out.println("Changes could not be committed");
e.printStackTrace(); e.printStackTrace();
System.exit(1); System.exit(1);
} } finally {
finally { if (context != null) {
if (context != null)
{
context.restoreAuthSystemState(); context.restoreAuthSystemState();
} }
} }
@@ -352,33 +328,33 @@ public class Harvest
* @param email * @param email
*/ */
private void purgeCollection(String collectionID, String email) { private void purgeCollection(String collectionID, String email) {
System.out.println("Purging collection of all items and resetting last_harvested and harvest_message: " + collectionID); System.out.println(
"Purging collection of all items and resetting last_harvested and harvest_message: " + collectionID);
Collection collection = resolveCollection(collectionID); Collection collection = resolveCollection(collectionID);
try try {
{
EPerson eperson = ePersonService.findByEmail(context, email); EPerson eperson = ePersonService.findByEmail(context, email);
context.setCurrentUser(eperson); context.setCurrentUser(eperson);
context.turnOffAuthorisationSystem(); context.turnOffAuthorisationSystem();
ItemService itemService = ContentServiceFactory.getInstance().getItemService(); ItemService itemService = ContentServiceFactory.getInstance().getItemService();
Iterator<Item> it = itemService.findByCollection(context, collection); Iterator<Item> it = itemService.findByCollection(context, collection);
int i=0; int i = 0;
while (it.hasNext()) { while (it.hasNext()) {
i++; i++;
Item item = it.next(); Item item = it.next();
System.out.println("Deleting: " + item.getHandle()); System.out.println("Deleting: " + item.getHandle());
collectionService.removeItem(context, collection, item); collectionService.removeItem(context, collection, item);
context.uncacheEntity(item);// Dispatch events every 50 items context.uncacheEntity(item);// Dispatch events every 50 items
if (i%50 == 0) { if (i % 50 == 0) {
context.dispatchEvents(); context.dispatchEvents();
i=0; i = 0;
} }
} }
HarvestedCollection hc = harvestedCollectionService.find(context, collection); HarvestedCollection hc = harvestedCollectionService.find(context, collection);
if (hc != null) { if (hc != null) {
hc.setLastHarvested(null); hc.setLastHarvested(null);
hc.setHarvestMessage(""); hc.setHarvestMessage("");
hc.setHarvestStatus(HarvestedCollection.STATUS_READY); hc.setHarvestStatus(HarvestedCollection.STATUS_READY);
hc.setHarvestStartTime(null); hc.setHarvestStartTime(null);
@@ -386,13 +362,11 @@ public class Harvest
} }
context.restoreAuthSystemState(); context.restoreAuthSystemState();
context.dispatchEvents(); context.dispatchEvents();
} } catch (Exception e) {
catch (Exception e) {
System.out.println("Changes could not be committed"); System.out.println("Changes could not be committed");
e.printStackTrace(); e.printStackTrace();
System.exit(1); System.exit(1);
} } finally {
finally {
context.restoreAuthSystemState(); context.restoreAuthSystemState();
} }
} }
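purgeCollection() above removes items one at a time and flushes events in batches of 50 so the session cache stays small. A standalone sketch of that batching pattern; exception signatures follow the DSpace 7 service APIs and may differ slightly between versions:

    import java.io.IOException;
    import java.sql.SQLException;
    import java.util.Iterator;

    import org.dspace.authorize.AuthorizeException;
    import org.dspace.content.Collection;
    import org.dspace.content.Item;
    import org.dspace.content.factory.ContentServiceFactory;
    import org.dspace.content.service.CollectionService;
    import org.dspace.content.service.ItemService;
    import org.dspace.core.Context;

    public class BatchedPurge {
        public static void purge(Context context, Collection collection)
                throws SQLException, AuthorizeException, IOException {
            ItemService itemService = ContentServiceFactory.getInstance().getItemService();
            CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
            Iterator<Item> items = itemService.findByCollection(context, collection);
            int count = 0;
            while (items.hasNext()) {
                Item item = items.next();
                collectionService.removeItem(context, collection, item);
                context.uncacheEntity(item);       // keep the Hibernate session small
                if (++count % 50 == 0) {
                    context.dispatchEvents();      // flush accumulated events in batches
                }
            }
            context.dispatchEvents();              // flush whatever is left
        }
    }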
@@ -411,8 +385,7 @@ public class Harvest
HarvestedCollection hc = harvestedCollectionService.find(context, collection); HarvestedCollection hc = harvestedCollectionService.find(context, collection);
harvester = new OAIHarvester(context, collection, hc); harvester = new OAIHarvester(context, collection, hc);
System.out.println("success. "); System.out.println("success. ");
} } catch (HarvestingException hex) {
catch (HarvestingException hex) {
System.out.print("failed. "); System.out.print("failed. ");
System.out.println(hex.getMessage()); System.out.println(hex.getMessage());
throw new IllegalStateException("Unable to harvest", hex); throw new IllegalStateException("Unable to harvest", hex);
@@ -429,14 +402,11 @@ public class Harvest
context.setCurrentUser(eperson); context.setCurrentUser(eperson);
harvester.runHarvest(); harvester.runHarvest();
context.complete(); context.complete();
} } catch (SQLException e) {
catch (SQLException e) {
throw new IllegalStateException("Failed to run harvester", e); throw new IllegalStateException("Failed to run harvester", e);
} } catch (AuthorizeException e) {
catch (AuthorizeException e) {
throw new IllegalStateException("Failed to run harvester", e); throw new IllegalStateException("Failed to run harvester", e);
} } catch (IOException e) {
catch (IOException e) {
throw new IllegalStateException("Failed to run harvester", e); throw new IllegalStateException("Failed to run harvester", e);
} }
@@ -444,24 +414,22 @@ public class Harvest
} }
/** /**
* Resets harvest_status and harvest_start_time flags for all collections that have a row in the harvested_collections table * Resets harvest_status and harvest_start_time flags for all collections that have a row in the
* harvested_collections table
*/ */
private static void resetHarvesting() { private static void resetHarvesting() {
System.out.print("Resetting harvest status flag on all collections... "); System.out.print("Resetting harvest status flag on all collections... ");
try try {
{
List<HarvestedCollection> harvestedCollections = harvestedCollectionService.findAll(context); List<HarvestedCollection> harvestedCollections = harvestedCollectionService.findAll(context);
for (HarvestedCollection harvestedCollection : harvestedCollections) for (HarvestedCollection harvestedCollection : harvestedCollections) {
{
//hc.setHarvestResult(null,""); //hc.setHarvestResult(null,"");
harvestedCollection.setHarvestStartTime(null); harvestedCollection.setHarvestStartTime(null);
harvestedCollection.setHarvestStatus(HarvestedCollection.STATUS_READY); harvestedCollection.setHarvestStatus(HarvestedCollection.STATUS_READY);
harvestedCollectionService.update(context, harvestedCollection); harvestedCollectionService.update(context, harvestedCollection);
} }
System.out.println("success. "); System.out.println("success. ");
} } catch (Exception ex) {
catch (Exception ex) {
System.out.println("failed. "); System.out.println("failed. ");
ex.printStackTrace(); ex.printStackTrace();
} }
@@ -470,15 +438,12 @@ public class Harvest
/** /**
* Starts up the harvest scheduler. Terminating this process will stop the scheduler. * Starts up the harvest scheduler. Terminating this process will stop the scheduler.
*/ */
private static void startHarvester() private static void startHarvester() {
{ try {
try
{
System.out.print("Starting harvest loop... "); System.out.print("Starting harvest loop... ");
HarvestServiceFactory.getInstance().getHarvestSchedulingService().startNewScheduler(); HarvestServiceFactory.getInstance().getHarvestSchedulingService().startNewScheduler();
System.out.println("running. "); System.out.println("running. ");
} } catch (Exception ex) {
catch (Exception ex) {
ex.printStackTrace(); ex.printStackTrace();
} }
} }
@@ -486,34 +451,33 @@ public class Harvest
/** /**
* See if the responder is alive and working. * See if the responder is alive and working.
* *
* @param server address of the responder's host. * @param server address of the responder's host.
* @param set name of an item set. * @param set name of an item set.
* @param metadataFormat local prefix name, or null for "dc". * @param metadataFormat local prefix name, or null for "dc".
*/ */
private static void pingResponder(String server, String set, String metadataFormat) private static void pingResponder(String server, String set, String metadataFormat) {
{
List<String> errors; List<String> errors;
System.out.print("Testing basic PMH access: "); System.out.print("Testing basic PMH access: ");
errors = OAIHarvester.verifyOAIharvester(server, set, errors = harvestedCollectionService.verifyOAIharvester(server, set,
(null != metadataFormat) ? metadataFormat : "dc", false); (null != metadataFormat) ? metadataFormat : "dc", false);
if (errors.isEmpty()) if (errors.isEmpty()) {
System.out.println("OK"); System.out.println("OK");
else } else {
{ for (String error : errors) {
for (String error : errors)
System.err.println(error); System.err.println(error);
}
} }
System.out.print("Testing ORE support: "); System.out.print("Testing ORE support: ");
errors = OAIHarvester.verifyOAIharvester(server, set, errors = harvestedCollectionService.verifyOAIharvester(server, set,
(null != metadataFormat) ? metadataFormat : "dc", true); (null != metadataFormat) ? metadataFormat : "dc", true);
if (errors.isEmpty()) if (errors.isEmpty()) {
System.out.println("OK"); System.out.println("OK");
else } else {
{ for (String error : errors) {
for (String error : errors)
System.err.println(error); System.err.println(error);
}
} }
} }
} }
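pingResponder() now calls verifyOAIharvester on the harvested-collection service instead of the old static OAIHarvester method. A hedged sketch of that flow; the factory lookup below is an assumption (the CLI receives its service instance at class-initialization time), while the verify calls mirror the signatures shown in the diff:

    import java.util.List;

    import org.dspace.harvest.factory.HarvestServiceFactory;
    import org.dspace.harvest.service.HarvestedCollectionService;

    public class PingResponderSketch {
        public static void ping(String server, String set, String metadataFormat) {
            HarvestedCollectionService service =
                HarvestServiceFactory.getInstance().getHarvestedCollectionService();
            String prefix = (metadataFormat != null) ? metadataFormat : "dc";

            report("basic PMH access", service.verifyOAIharvester(server, set, prefix, false));
            report("ORE support", service.verifyOAIharvester(server, set, prefix, true));
        }

        private static void report(String check, List<String> errors) {
            if (errors.isEmpty()) {
                System.out.println(check + ": OK");
            } else {
                for (String error : errors) {
                    System.err.println(error);
                }
            }
        }
    }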


@@ -7,7 +7,17 @@
*/ */
package org.dspace.app.itemexport; package org.dspace.app.itemexport;
import org.apache.commons.cli.*; import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.app.itemexport.factory.ItemExportServiceFactory; import org.dspace.app.itemexport.factory.ItemExportServiceFactory;
import org.dspace.app.itemexport.service.ItemExportService; import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.content.Collection; import org.dspace.content.Collection;
@@ -20,8 +30,6 @@ import org.dspace.core.Context;
import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService; import org.dspace.handle.service.HandleService;
import java.util.*;
/** /**
* Item exporter to create simple AIPs for DSpace content. Currently exports * Item exporter to create simple AIPs for DSpace content. Currently exports
* individual items, or entire collections. For instructions on use, see * individual items, or entire collections. For instructions on use, see
@@ -45,17 +53,21 @@ import java.util.*;
*/ */
public class ItemExportCLITool { public class ItemExportCLITool {
protected static ItemExportService itemExportService = ItemExportServiceFactory.getInstance().getItemExportService(); protected static ItemExportService itemExportService = ItemExportServiceFactory.getInstance()
.getItemExportService();
protected static HandleService handleService = HandleServiceFactory.getInstance().getHandleService(); protected static HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
protected static ItemService itemService = ContentServiceFactory.getInstance().getItemService(); protected static ItemService itemService = ContentServiceFactory.getInstance().getItemService();
protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
/**
* Default constructor
*/
private ItemExportCLITool() { }
/* /*
* *
*/ */
public static void main(String[] argv) throws Exception public static void main(String[] argv) throws Exception {
{
// create an options object and populate it // create an options object and populate it
CommandLineParser parser = new PosixParser(); CommandLineParser parser = new PosixParser();
@@ -64,10 +76,11 @@ public class ItemExportCLITool {
options.addOption("t", "type", true, "type: COLLECTION or ITEM"); options.addOption("t", "type", true, "type: COLLECTION or ITEM");
options.addOption("i", "id", true, "ID or handle of thing to export"); options.addOption("i", "id", true, "ID or handle of thing to export");
options.addOption("d", "dest", true, options.addOption("d", "dest", true,
"destination where you want items to go"); "destination where you want items to go");
options.addOption("m", "migrate", false, "export for migration (remove handle and metadata that will be re-created in new system)"); options.addOption("m", "migrate", false,
"export for migration (remove handle and metadata that will be re-created in new system)");
options.addOption("n", "number", true, options.addOption("n", "number", true,
"sequence number to begin exporting items with"); "sequence number to begin exporting items with");
options.addOption("z", "zip", true, "export as zip file (specify filename e.g. export.zip)"); options.addOption("z", "zip", true, "export as zip file (specify filename e.g. export.zip)");
options.addOption("h", "help", false, "help"); options.addOption("h", "help", false, "help");
@@ -86,175 +99,140 @@ public class ItemExportCLITool {
Item myItem = null; Item myItem = null;
Collection mycollection = null; Collection mycollection = null;
if (line.hasOption('h')) if (line.hasOption('h')) {
{
HelpFormatter myhelp = new HelpFormatter(); HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("ItemExport\n", options); myhelp.printHelp("ItemExport\n", options);
System.out System.out
.println("\nfull collection: ItemExport -t COLLECTION -i ID -d dest -n number"); .println("\nfull collection: ItemExport -t COLLECTION -i ID -d dest -n number");
System.out System.out
.println("singleitem: ItemExport -t ITEM -i ID -d dest -n number"); .println("singleitem: ItemExport -t ITEM -i ID -d dest -n number");
System.exit(0); System.exit(0);
} }
if (line.hasOption('t')) // type if (line.hasOption('t')) { // type
{
typeString = line.getOptionValue('t'); typeString = line.getOptionValue('t');
if ("ITEM".equals(typeString)) if ("ITEM".equals(typeString)) {
{
myType = Constants.ITEM; myType = Constants.ITEM;
} } else if ("COLLECTION".equals(typeString)) {
else if ("COLLECTION".equals(typeString))
{
myType = Constants.COLLECTION; myType = Constants.COLLECTION;
} }
} }
if (line.hasOption('i')) // id if (line.hasOption('i')) { // id
{
myIDString = line.getOptionValue('i'); myIDString = line.getOptionValue('i');
} }
if (line.hasOption('d')) // dest if (line.hasOption('d')) { // dest
{
destDirName = line.getOptionValue('d'); destDirName = line.getOptionValue('d');
} }
if (line.hasOption('n')) // number if (line.hasOption('n')) { // number
{
seqStart = Integer.parseInt(line.getOptionValue('n')); seqStart = Integer.parseInt(line.getOptionValue('n'));
} }
boolean migrate = false; boolean migrate = false;
if (line.hasOption('m')) // number if (line.hasOption('m')) { // number
{
migrate = true; migrate = true;
} }
boolean zip = false; boolean zip = false;
String zipFileName = ""; String zipFileName = "";
if (line.hasOption('z')) if (line.hasOption('z')) {
{
zip = true; zip = true;
zipFileName = line.getOptionValue('z'); zipFileName = line.getOptionValue('z');
} }
boolean excludeBitstreams = false; boolean excludeBitstreams = false;
if (line.hasOption('x')) if (line.hasOption('x')) {
{ excludeBitstreams = true;
excludeBitstreams = true;
} }
// now validate the args // now validate the args
if (myType == -1) if (myType == -1) {
{
System.out System.out
.println("type must be either COLLECTION or ITEM (-h for help)"); .println("type must be either COLLECTION or ITEM (-h for help)");
System.exit(1); System.exit(1);
} }
if (destDirName == null) if (destDirName == null) {
{
System.out System.out
.println("destination directory must be set (-h for help)"); .println("destination directory must be set (-h for help)");
System.exit(1); System.exit(1);
} }
if (seqStart == -1) if (seqStart == -1) {
{
System.out System.out
.println("sequence start number must be set (-h for help)"); .println("sequence start number must be set (-h for help)");
System.exit(1); System.exit(1);
} }
if (myIDString == null) if (myIDString == null) {
{
System.out System.out
.println("ID must be set to either a database ID or a handle (-h for help)"); .println("ID must be set to either a database ID or a handle (-h for help)");
System.exit(1); System.exit(1);
} }
Context c = new Context(Context.Mode.READ_ONLY); Context c = new Context(Context.Mode.READ_ONLY);
c.turnOffAuthorisationSystem(); c.turnOffAuthorisationSystem();
if (myType == Constants.ITEM) if (myType == Constants.ITEM) {
{
// first, is myIDString a handle? // first, is myIDString a handle?
if (myIDString.indexOf('/') != -1) if (myIDString.indexOf('/') != -1) {
{
myItem = (Item) handleService.resolveToObject(c, myIDString); myItem = (Item) handleService.resolveToObject(c, myIDString);
if ((myItem == null) || (myItem.getType() != Constants.ITEM)) if ((myItem == null) || (myItem.getType() != Constants.ITEM)) {
{
myItem = null; myItem = null;
} }
} } else {
else
{
myItem = itemService.find(c, UUID.fromString(myIDString)); myItem = itemService.find(c, UUID.fromString(myIDString));
} }
if (myItem == null) if (myItem == null) {
{
System.out System.out
.println("Error, item cannot be found: " + myIDString); .println("Error, item cannot be found: " + myIDString);
} }
} } else {
else if (myIDString.indexOf('/') != -1) {
{
if (myIDString.indexOf('/') != -1)
{
// has a / must be a handle // has a / must be a handle
mycollection = (Collection) handleService.resolveToObject(c, mycollection = (Collection) handleService.resolveToObject(c,
myIDString); myIDString);
// ensure it's a collection // ensure it's a collection
if ((mycollection == null) if ((mycollection == null)
|| (mycollection.getType() != Constants.COLLECTION)) || (mycollection.getType() != Constants.COLLECTION)) {
{
mycollection = null; mycollection = null;
} }
} } else if (myIDString != null) {
else if (myIDString != null)
{
mycollection = collectionService.find(c, UUID.fromString(myIDString)); mycollection = collectionService.find(c, UUID.fromString(myIDString));
} }
if (mycollection == null) if (mycollection == null) {
{
System.out.println("Error, collection cannot be found: " System.out.println("Error, collection cannot be found: "
+ myIDString); + myIDString);
System.exit(1); System.exit(1);
} }
} }
if (zip) if (zip) {
{
Iterator<Item> items; Iterator<Item> items;
if (myItem != null) if (myItem != null) {
{
List<Item> myItems = new ArrayList<>(); List<Item> myItems = new ArrayList<>();
myItems.add(myItem); myItems.add(myItem);
items = myItems.iterator(); items = myItems.iterator();
} } else {
else
{
System.out.println("Exporting from collection: " + myIDString); System.out.println("Exporting from collection: " + myIDString);
items = itemService.findByCollection(c, mycollection); items = itemService.findByCollection(c, mycollection);
} }
itemExportService.exportAsZip(c, items, destDirName, zipFileName, seqStart, migrate, excludeBitstreams); itemExportService.exportAsZip(c, items, destDirName, zipFileName, seqStart, migrate, excludeBitstreams);
} } else {
else if (myItem != null) {
{
if (myItem != null)
{
// it's only a single item // it's only a single item
itemExportService.exportItem(c, Collections.singletonList(myItem).iterator(), destDirName, seqStart, migrate, excludeBitstreams); itemExportService
} .exportItem(c, Collections.singletonList(myItem).iterator(), destDirName, seqStart, migrate,
else excludeBitstreams);
{ } else {
System.out.println("Exporting from collection: " + myIDString); System.out.println("Exporting from collection: " + myIDString);
// it's a collection, so do a bunch of items // it's a collection, so do a bunch of items


@@ -10,20 +10,17 @@ package org.dspace.app.itemexport;
/** /**
* An exception that can be thrown when errors occur during item export * An exception that can be thrown when errors occur during item export
*/ */
public class ItemExportException extends Exception public class ItemExportException extends Exception {
{
public static final int EXPORT_TOO_LARGE = 0; public static final int EXPORT_TOO_LARGE = 0;
private int reason; private int reason;
public ItemExportException(int r, String message) public ItemExportException(int r, String message) {
{
super(message); super(message);
reason = r; reason = r;
} }
public int getReason() public int getReason() {
{
return reason; return reason;
} }
} }
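A hypothetical caller of this exception, showing how the EXPORT_TOO_LARGE reason code would be raised and read back; the size limit and class name are made up for the sketch:

    import org.dspace.app.itemexport.ItemExportException;

    public class ExportSizeCheck {
        private static final long MAX_EXPORT_SIZE_BYTES = 1024L * 1024L * 1024L; // assumed limit

        public static void assertExportable(long estimatedSizeBytes) throws ItemExportException {
            if (estimatedSizeBytes > MAX_EXPORT_SIZE_BYTES) {
                throw new ItemExportException(ItemExportException.EXPORT_TOO_LARGE,
                    "Export of " + estimatedSizeBytes + " bytes exceeds the configured limit");
            }
        }

        public static void main(String[] args) {
            try {
                assertExportable(2L * 1024L * 1024L * 1024L);
            } catch (ItemExportException e) {
                System.err.println("reason=" + e.getReason() + ": " + e.getMessage());
            }
        }
    }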


@@ -11,7 +11,8 @@ import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
/** /**
* Abstract factory to get services for the itemexport package, use ItemExportServiceFactory.getInstance() to retrieve an implementation * Abstract factory to get services for the itemexport package, use ItemExportServiceFactory.getInstance() to
* retrieve an implementation
* *
* @author kevinvandevelde at atmire.com * @author kevinvandevelde at atmire.com
*/ */
@@ -19,7 +20,8 @@ public abstract class ItemExportServiceFactory {
public abstract ItemExportService getItemExportService(); public abstract ItemExportService getItemExportService();
public static ItemExportServiceFactory getInstance(){ public static ItemExportServiceFactory getInstance() {
return DSpaceServicesFactory.getInstance().getServiceManager().getServiceByName("itemExportServiceFactory", ItemExportServiceFactory.class); return DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName("itemExportServiceFactory", ItemExportServiceFactory.class);
} }
} }
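Typical lookup through this factory, for reference; the returned instance is the Spring-wired ItemExportService implementation:

    import org.dspace.app.itemexport.factory.ItemExportServiceFactory;
    import org.dspace.app.itemexport.service.ItemExportService;

    public class ExportServiceLookup {
        public static ItemExportService lookup() {
            return ItemExportServiceFactory.getInstance().getItemExportService();
        }
    }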


@@ -11,7 +11,8 @@ import org.dspace.app.itemexport.service.ItemExportService;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
/** /**
* Factory implementation to get services for the itemexport package, use ItemExportServiceFactory.getInstance() to retrieve an implementation * Factory implementation to get services for the itemexport package, use ItemExportServiceFactory.getInstance() to
* retrieve an implementation
* *
* @author kevinvandevelde at atmire.com * @author kevinvandevelde at atmire.com
*/ */


@@ -7,16 +7,16 @@
*/ */
package org.dspace.app.itemexport.service; package org.dspace.app.itemexport.service;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import javax.mail.MessagingException;
import java.io.InputStream; import java.io.InputStream;
import java.util.Date; import java.util.Date;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import javax.mail.MessagingException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
/** /**
* Item exporter to create simple AIPs for DSpace content. Currently exports * Item exporter to create simple AIPs for DSpace content. Currently exports
@@ -47,122 +47,109 @@ public interface ItemExportService {
public static final String COMPRESSED_EXPORT_MIME_TYPE = "application/zip"; public static final String COMPRESSED_EXPORT_MIME_TYPE = "application/zip";
public void exportItem(Context c, Iterator<Item> i, public void exportItem(Context c, Iterator<Item> i,
String destDirName, int seqStart, boolean migrate, String destDirName, int seqStart, boolean migrate,
boolean excludeBitstreams) throws Exception; boolean excludeBitstreams) throws Exception;
/** /**
* Method to perform an export and save it as a zip file. * Method to perform an export and save it as a zip file.
* *
* @param context The DSpace Context * @param context The DSpace Context
* @param items The items to export * @param items The items to export
* @param destDirName The directory to save the export in * @param destDirName The directory to save the export in
* @param zipFileName The name to save the zip file as * @param zipFileName The name to save the zip file as
* @param seqStart The first number in the sequence * @param seqStart The first number in the sequence
* @param migrate Whether to use the migrate option or not * @param migrate Whether to use the migrate option or not
* @param excludeBitstreams Whether to exclude bitstreams or not * @param excludeBitstreams Whether to exclude bitstreams or not
* @throws Exception if error * @throws Exception if error
*/ */
public void exportAsZip(Context context, Iterator<Item> items, public void exportAsZip(Context context, Iterator<Item> items,
String destDirName, String zipFileName, String destDirName, String zipFileName,
int seqStart, boolean migrate, int seqStart, boolean migrate,
boolean excludeBitstreams) throws Exception; boolean excludeBitstreams) throws Exception;
/** /**
* Convenience method to export a single Community, Collection, or * Convenience method to export a single Community, Collection, or
* Item * Item
* *
* @param dso * @param dso - the dspace object to export
* - the dspace object to export * @param context - the dspace context
* @param context
* - the dspace context
* @param migrate Whether to use the migrate option or not * @param migrate Whether to use the migrate option or not
* @throws Exception if error * @throws Exception if error
*/ */
public void createDownloadableExport(DSpaceObject dso, public void createDownloadableExport(DSpaceObject dso,
Context context, boolean migrate) throws Exception; Context context, boolean migrate) throws Exception;
/** /**
* Convenience method to export a List of dspace objects (Community, * Convenience method to export a List of dspace objects (Community,
* Collection or Item) * Collection or Item)
* *
* @param dsObjects * @param dsObjects - List containing dspace objects
* - List containing dspace objects * @param context - the dspace context
* @param context * @param migrate Whether to use the migrate option or not
* - the dspace context
* @param migrate Whether to use the migrate option or not
* @throws Exception if error * @throws Exception if error
*/ */
public void createDownloadableExport(List<DSpaceObject> dsObjects, public void createDownloadableExport(List<DSpaceObject> dsObjects,
Context context, boolean migrate) throws Exception; Context context, boolean migrate) throws Exception;
/** /**
* Convenience method to export a single Community, Collection, or * Convenience method to export a single Community, Collection, or
* Item * Item
* *
* @param dso * @param dso - the dspace object to export
* - the dspace object to export * @param context - the dspace context
* @param context * @param additionalEmail - cc email to use
* - the dspace context * @param migrate Whether to use the migrate option or not
* @param additionalEmail
* - cc email to use
* @param migrate Whether to use the migrate option or not
* @throws Exception if error * @throws Exception if error
*/ */
public void createDownloadableExport(DSpaceObject dso, public void createDownloadableExport(DSpaceObject dso,
Context context, String additionalEmail, boolean migrate) throws Exception; Context context, String additionalEmail, boolean migrate) throws Exception;
/** /**
* Convenience method to export a List of dspace objects (Community, * Convenience method to export a List of dspace objects (Community,
* Collection or Item) * Collection or Item)
* *
* @param dsObjects * @param dsObjects - List containing dspace objects
* - List containing dspace objects * @param context - the dspace context
* @param context * @param additionalEmail - cc email to use
* - the dspace context * @param migrate Whether to use the migrate option or not
* @param additionalEmail
* - cc email to use
* @param migrate Whether to use the migrate option or not
* @throws Exception if error * @throws Exception if error
*/ */
public void createDownloadableExport(List<DSpaceObject> dsObjects, public void createDownloadableExport(List<DSpaceObject> dsObjects,
Context context, String additionalEmail, boolean migrate) throws Exception; Context context, String additionalEmail, boolean migrate) throws Exception;
/** /**
* Create a file name based on the date and eperson * Create a file name based on the date and eperson
* *
* @param type Type of object (as string) * @param type Type of object (as string)
* @param eperson * @param eperson - eperson who requested export and will be able to download it
* - eperson who requested export and will be able to download it * @param date - the date the export process was created
* @param date
* - the date the export process was created
* @return String representing the file name in the form of * @return String representing the file name in the form of
* 'export_yyy_MMM_dd_count_epersonID' * 'export_yyy_MMM_dd_count_epersonID'
* @throws Exception if error * @throws Exception if error
*/ */
public String assembleFileName(String type, EPerson eperson, public String assembleFileName(String type, EPerson eperson,
Date date) throws Exception; Date date) throws Exception;
/** /**
* Use config file entry for org.dspace.app.itemexport.download.dir and id * Use config file entry for org.dspace.app.itemexport.download.dir and id
* of the eperson to create a download directory name * of the eperson to create a download directory name
* *
* @param ePerson * @param ePerson - the eperson who requested export archive
* - the eperson who requested export archive
* @return String representing a directory in the form of * @return String representing a directory in the form of
* org.dspace.app.itemexport.download.dir/epersonID * org.dspace.app.itemexport.download.dir/epersonID
* @throws Exception if error * @throws Exception if error
*/ */
public String getExportDownloadDirectory(EPerson ePerson) public String getExportDownloadDirectory(EPerson ePerson)
throws Exception; throws Exception;
/** /**
* Returns config file entry for org.dspace.app.itemexport.work.dir * Returns config file entry for org.dspace.app.itemexport.work.dir
* *
* @return String representing config file entry for * @return String representing config file entry for
* org.dspace.app.itemexport.work.dir * org.dspace.app.itemexport.work.dir
* @throws Exception if error * @throws Exception if error
*/ */
public String getExportWorkDirectory() throws Exception; public String getExportWorkDirectory() throws Exception;
@@ -170,49 +157,43 @@ public interface ItemExportService {
/** /**
* Used to read the export archive. Intended for download. * Used to read the export archive. Intended for download.
* *
* @param fileName * @param fileName the name of the file to download
* the name of the file to download * @param eperson the eperson requesting the download
* @param eperson
* the eperson requesting the download
* @return an input stream of the file to be downloaded * @return an input stream of the file to be downloaded
* @throws Exception if error * @throws Exception if error
*/ */
public InputStream getExportDownloadInputStream(String fileName, public InputStream getExportDownloadInputStream(String fileName,
EPerson eperson) throws Exception; EPerson eperson) throws Exception;
/** /**
* Get the file size of the export archive represented by the file name. * Get the file size of the export archive represented by the file name.
* *
* @param context DSpace context * @param context DSpace context
* @param fileName * @param fileName name of the file to get the size.
* name of the file to get the size.
* @throws Exception if error
* @return size as long * @return size as long
* @throws Exception if error
*/ */
public long getExportFileSize(Context context, String fileName) throws Exception; public long getExportFileSize(Context context, String fileName) throws Exception;
/** /**
* Get the last modified date of the export archive represented by the file name. * Get the last modified date of the export archive represented by the file name.
* *
* @param context DSpace context * @param context DSpace context
* @param fileName * @param fileName name of the file to get the size.
* name of the file to get the size.
* @return date as long * @return date as long
* @see java.io.File#lastModified()
* @throws Exception if error * @throws Exception if error
* @see java.io.File#lastModified()
*/ */
public long getExportFileLastModified(Context context, String fileName) public long getExportFileLastModified(Context context, String fileName)
throws Exception; throws Exception;
/** /**
* The file name of the export archive contains the eperson id of the person * The file name of the export archive contains the eperson id of the person
* who created it When requested for download this method can check if the * who created it When requested for download this method can check if the
* person requesting it is the same one that created it * person requesting it is the same one that created it
* *
* @param context * @param context dspace context
* dspace context * @param fileName the file name to check auths for
* @param fileName
* the file name to check auths for
* @return true if it is the same person false otherwise * @return true if it is the same person false otherwise
*/ */
public boolean canDownload(Context context, String fileName); public boolean canDownload(Context context, String fileName);
@@ -223,19 +204,18 @@ public interface ItemExportService {
* *
* @param eperson EPerson object * @param eperson EPerson object
* @return a list of file names representing export archives that have been * @return a list of file names representing export archives that have been
* processed * processed
* @throws Exception if error * @throws Exception if error
*/ */
public List<String> getExportsAvailable(EPerson eperson) public List<String> getExportsAvailable(EPerson eperson)
throws Exception; throws Exception;
/** /**
* A clean up method that is run before a new export archive is created. It * A clean up method that is run before a new export archive is created. It
* uses the config file entry 'org.dspace.app.itemexport.life.span.hours' to * uses the config file entry 'org.dspace.app.itemexport.life.span.hours' to
* determine if the current exports are too old and need purging * determine if the current exports are too old and need purging
* *
* @param eperson * @param eperson - the eperson to clean up
* - the eperson to clean up
* @throws Exception if error * @throws Exception if error
*/ */
public void deleteOldExportArchives(EPerson eperson) throws Exception; public void deleteOldExportArchives(EPerson eperson) throws Exception;
@@ -256,17 +236,14 @@ public interface ItemExportService {
* communication with email instead. Send a success email once the export * communication with email instead. Send a success email once the export
* archive is complete and ready for download * archive is complete and ready for download
* *
* @param context * @param context - the current Context
* - the current Context * @param eperson - eperson to send the email to
* @param eperson * @param fileName - the file name to be downloaded. It is added to the url in
* - eperson to send the email to * the email
* @param fileName
* - the file name to be downloaded. It is added to the url in
* the email
* @throws MessagingException if error * @throws MessagingException if error
*/ */
public void emailSuccessMessage(Context context, EPerson eperson, public void emailSuccessMessage(Context context, EPerson eperson,
String fileName) throws MessagingException; String fileName) throws MessagingException;
/** /**
* Since the archive is created in a new thread we are unable to communicate * Since the archive is created in a new thread we are unable to communicate
@@ -274,19 +251,18 @@ public interface ItemExportService {
* communication with email instead. Send an error email if the export * communication with email instead. Send an error email if the export
* archive fails * archive fails
* *
* @param eperson * @param eperson - EPerson to send the error message to
* - EPerson to send the error message to * @param error - the error message
* @param error
* - the error message
* @throws MessagingException if error * @throws MessagingException if error
*/ */
public void emailErrorMessage(EPerson eperson, String error) public void emailErrorMessage(EPerson eperson, String error)
throws MessagingException; throws MessagingException;
/** /**
* Zip source to target * Zip source to target
*
* @param strSource source file * @param strSource source file
* @param target target file * @param target target file
* @throws Exception if error * @throws Exception if error
*/ */
public void zip(String strSource, String target) throws Exception; public void zip(String strSource, String target) throws Exception;
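A short sketch of driving this interface to zip up an entire collection; the destination path, zip name and flag values are placeholders, and the call follows the exportAsZip declaration documented above:

    import java.util.Iterator;

    import org.dspace.app.itemexport.factory.ItemExportServiceFactory;
    import org.dspace.app.itemexport.service.ItemExportService;
    import org.dspace.content.Collection;
    import org.dspace.content.Item;
    import org.dspace.content.factory.ContentServiceFactory;
    import org.dspace.core.Context;

    public class ZipExportSketch {
        public static void exportCollection(Context context, Collection collection) throws Exception {
            ItemExportService exportService = ItemExportServiceFactory.getInstance().getItemExportService();
            Iterator<Item> items = ContentServiceFactory.getInstance().getItemService()
                                                        .findByCollection(context, collection);
            // seqStart = 1, migrate = false (keep handles), excludeBitstreams = false (include files)
            exportService.exportAsZip(context, items, "/tmp/export", "export.zip", 1, false, false);
        }
    }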


@@ -7,99 +7,100 @@
*/ */
package org.dspace.app.itemimport; package org.dspace.app.itemimport;
import gr.ekt.bte.core.DataLoader;
import gr.ekt.bte.core.TransformationEngine;
import gr.ekt.bte.dataloader.FileDataLoader;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import gr.ekt.bte.core.DataLoader;
import gr.ekt.bte.core.TransformationEngine;
import gr.ekt.bte.dataloader.FileDataLoader;
/** /**
* This class acts as a Service in the procedure to batch import using the Biblio-Transformation-Engine * This class acts as a Service in the procedure to batch import using the Biblio-Transformation-Engine
*/ */
public class BTEBatchImportService public class BTEBatchImportService {
{
TransformationEngine transformationEngine; TransformationEngine transformationEngine;
Map<String, DataLoader> dataLoaders = new HashMap<String, DataLoader>(); Map<String, DataLoader> dataLoaders = new HashMap<String, DataLoader>();
Map<String, String> outputMap = new HashMap<String,String>(); Map<String, String> outputMap = new HashMap<String, String>();
/** /**
* Default constructor * Default constructor
*/ */
public BTEBatchImportService() public BTEBatchImportService() {
{
super(); super();
} }
/** /**
* Setter method for dataLoaders parameter * Setter method for dataLoaders parameter
*
* @param dataLoaders map of data loaders * @param dataLoaders map of data loaders
*/ */
public void setDataLoaders(Map<String, DataLoader> dataLoaders) public void setDataLoaders(Map<String, DataLoader> dataLoaders) {
{
this.dataLoaders = dataLoaders; this.dataLoaders = dataLoaders;
} }
/** /**
* Get data loaders * Get data loaders
*
* @return the map of DataLoaders * @return the map of DataLoaders
*/ */
public Map<String, DataLoader> getDataLoaders() public Map<String, DataLoader> getDataLoaders() {
{
return dataLoaders; return dataLoaders;
} }
/** /**
* Get output map * Get output map
*
* @return the outputMapping * @return the outputMapping
*/ */
public Map<String, String> getOutputMap() { public Map<String, String> getOutputMap() {
return outputMap; return outputMap;
} }
/** /**
* Setter method for the outputMapping * Setter method for the outputMapping
* @param outputMap the output mapping *
*/ * @param outputMap the output mapping
public void setOutputMap(Map<String, String> outputMap) { */
this.outputMap = outputMap; public void setOutputMap(Map<String, String> outputMap) {
} this.outputMap = outputMap;
}
/** /**
* Get transformation engine * Get transformation engine
* @return transformation engine *
*/ * @return transformation engine
public TransformationEngine getTransformationEngine() { */
return transformationEngine; public TransformationEngine getTransformationEngine() {
} return transformationEngine;
}
/** /**
* set transformation engine * set transformation engine
* @param transformationEngine transformation engine *
*/ * @param transformationEngine transformation engine
public void setTransformationEngine(TransformationEngine transformationEngine) { */
this.transformationEngine = transformationEngine; public void setTransformationEngine(TransformationEngine transformationEngine) {
} this.transformationEngine = transformationEngine;
}
/** /**
* Getter of file data loaders * Getter of file data loaders
* @return List of file data loaders *
*/ * @return List of file data loaders
public List<String> getFileDataLoaders(){ */
List<String> result = new ArrayList<String>(); public List<String> getFileDataLoaders() {
List<String> result = new ArrayList<String>();
for (String key : dataLoaders.keySet()){ for (String key : dataLoaders.keySet()) {
DataLoader dl = dataLoaders.get(key); DataLoader dl = dataLoaders.get(key);
if (dl instanceof FileDataLoader){ if (dl instanceof FileDataLoader) {
result.add(key); result.add(key);
} }
} }
return result; return result;
} }
} }
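A sketch of how a caller might ask the service above which configured input types are file based, so a UI can offer an upload control only for those. The service-manager bean name is an assumption, since in DSpace this bean is normally injected from Spring configuration rather than looked up by hand:

    import java.util.List;

    import org.dspace.app.itemimport.BTEBatchImportService;
    import org.dspace.services.factory.DSpaceServicesFactory;

    public class BteLoaderListing {
        public static List<String> fileBasedInputTypes() {
            BTEBatchImportService bte = DSpaceServicesFactory.getInstance().getServiceManager()
                .getServiceByName("org.dspace.app.itemimport.BTEBatchImportService", BTEBatchImportService.class);
            return bte.getFileDataLoaders();   // keys of loaders that extend FileDataLoader
        }
    }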


@@ -20,198 +20,210 @@ import java.util.List;
/** /**
* @author kstamatis * @author kstamatis
*
*/ */
public class BatchUpload { public class BatchUpload {
private Date date; private Date date;
private File dir; private File dir;
private boolean successful; private boolean successful;
private int itemsImported; private int itemsImported;
private int totalItems = 0; private int totalItems = 0;
private List<String> handlesImported = new ArrayList<String>(); private List<String> handlesImported = new ArrayList<String>();
private String errorMsg = ""; private String errorMsg = "";
private String errorMsgHTML = ""; private String errorMsgHTML = "";
/** /**
* Initialize with directory * Initialize with directory
* @param dirPath directory path *
*/ * @param dirPath directory path
public BatchUpload(String dirPath) { */
public BatchUpload(String dirPath) {
this.initializeWithFile(new File(dirPath)); this.initializeWithFile(new File(dirPath));
} }
/** /**
* Initialize with directory * Initialize with directory
* @param dir directory path *
*/ * @param dir directory path
public BatchUpload(File dir) { */
public BatchUpload(File dir) {
this.initializeWithFile(dir); this.initializeWithFile(dir);
} }
/** /**
* Initialize with directory * Initialize with directory
* @param dir directory path *
*/ * @param dir directory path
private void initializeWithFile(File dir){ */
private void initializeWithFile(File dir) {
this.dir = dir; this.dir = dir;
String dirName = dir.getName(); String dirName = dir.getName();
long timeMillis = Long.parseLong(dirName); long timeMillis = Long.parseLong(dirName);
Calendar calendar = new GregorianCalendar(); Calendar calendar = new GregorianCalendar();
calendar.setTimeInMillis(timeMillis); calendar.setTimeInMillis(timeMillis);
this.date = calendar.getTime(); this.date = calendar.getTime();
try { try {
this.itemsImported = countLines(dir + File.separator + "mapfile"); this.itemsImported = countLines(dir + File.separator + "mapfile");
} catch (IOException e) { } catch (IOException e) {
e.printStackTrace(); e.printStackTrace();
} }
for (File file : dir.listFiles()){ for (File file : dir.listFiles()) {
if (file.isDirectory()){ if (file.isDirectory()) {
this.totalItems = file.list().length; this.totalItems = file.list().length;
} }
} }
this.successful = this.totalItems == this.itemsImported; this.successful = this.totalItems == this.itemsImported;
//Parse possible error message //Parse possible error message
File errorFile = new File(dir + File.separator + "error.txt"); File errorFile = new File(dir + File.separator + "error.txt");
if (errorFile.exists()){ if (errorFile.exists()) {
try { try {
readFile(dir + File.separator + "error.txt"); readFile(dir + File.separator + "error.txt");
} catch (IOException e) { } catch (IOException e) {
// TODO Auto-generated catch block // TODO Auto-generated catch block
e.printStackTrace(); e.printStackTrace();
} }
} }
} }
/** /**
* Count lines in file * Count lines in file
* @param filename file name *
* @return lines in file * @param filename file name
* @throws IOException if IO error * @return lines in file
*/ * @throws IOException if IO error
private int countLines(String filename) throws IOException { */
LineNumberReader reader = new LineNumberReader(new FileReader(filename)); private int countLines(String filename) throws IOException {
int cnt = 0; LineNumberReader reader = new LineNumberReader(new FileReader(filename));
String lineRead = ""; int cnt = 0;
while ((lineRead = reader.readLine()) != null) { String lineRead = "";
String[] parts = lineRead.split(" "); while ((lineRead = reader.readLine()) != null) {
if (parts.length > 1) String[] parts = lineRead.split(" ");
handlesImported.add(parts[1].trim()); if (parts.length > 1) {
else handlesImported.add(parts[1].trim());
handlesImported.add(lineRead); } else {
} handlesImported.add(lineRead);
}
}
cnt = reader.getLineNumber(); cnt = reader.getLineNumber();
reader.close(); reader.close();
return cnt; return cnt;
} }
/** /**
* Read a file * Read a file
* @param filename file name *
* @throws IOException if IO error * @param filename file name
*/ * @throws IOException if IO error
private void readFile(String filename) throws IOException { */
LineNumberReader reader = new LineNumberReader(new FileReader(filename)); private void readFile(String filename) throws IOException {
String lineRead = ""; LineNumberReader reader = new LineNumberReader(new FileReader(filename));
while ((lineRead = reader.readLine()) != null) { String lineRead = "";
this.errorMsg += lineRead + "\n"; while ((lineRead = reader.readLine()) != null) {
this.errorMsg += lineRead + "\n";
if (lineRead.startsWith("\tat ")){ if (lineRead.startsWith("\tat ")) {
this.errorMsgHTML += "<span class=\"batchimport-error-tab\">" + lineRead + "</span><br/>"; this.errorMsgHTML += "<span class=\"batchimport-error-tab\">" + lineRead + "</span><br/>";
} } else if (lineRead.startsWith("Caused by")) {
else if (lineRead.startsWith("Caused by")){ this.errorMsgHTML += "<span class=\"batchimport-error-caused\">" + lineRead + "</span><br/>";
this.errorMsgHTML += "<span class=\"batchimport-error-caused\">" + lineRead + "</span><br/>"; } else {
} this.errorMsgHTML += lineRead + "<br/>";
else { }
this.errorMsgHTML += lineRead + "<br/>"; }
} reader.close();
} }
reader.close();
}
/** /**
* Get date * Get date
* @return Date *
*/ * @return Date
public Date getDate() { */
return date; public Date getDate() {
} return date;
}
/** /**
* Get path to directory * Get path to directory
* @return directory *
*/ * @return directory
public File getDir() { */
return dir; public File getDir() {
} return dir;
}
/** /**
* Whether successful * Whether successful
* @return true or false *
*/ * @return true or false
public boolean isSuccessful() { */
return successful; public boolean isSuccessful() {
} return successful;
}
/** /**
* Get items imported * Get items imported
* @return number of items *
*/ * @return number of items
public int getItemsImported() { */
return itemsImported; public int getItemsImported() {
} return itemsImported;
}
/** /**
* Get total items * Get total items
* @return total *
*/ * @return total
public int getTotalItems() { */
return totalItems; public int getTotalItems() {
} return totalItems;
}
/** /**
* Get formatted date (dd/MM/yyyy - HH:mm) * Get formatted date (dd/MM/yyyy - HH:mm)
* @return date as string *
*/ * @return date as string
public String getDateFormatted(){ */
SimpleDateFormat df = new SimpleDateFormat("dd/MM/yyyy - HH:mm"); public String getDateFormatted() {
SimpleDateFormat df = new SimpleDateFormat("dd/MM/yyyy - HH:mm");
return df.format(date); return df.format(date);
} }
/** /**
* Get handles of imported files * Get handles of imported files
* @return list of handles *
*/ * @return list of handles
public List<String> getHandlesImported() { */
return handlesImported; public List<String> getHandlesImported() {
} return handlesImported;
}
/** /**
* Get error message * Get error message
* @return error message *
*/ * @return error message
public String getErrorMsg() { */
return errorMsg; public String getErrorMsg() {
} return errorMsg;
}
/** /**
* Get error message as HTML * Get error message as HTML
* @return error message string as HTML *
*/ * @return error message string as HTML
public String getErrorMsgHTML() { */
return errorMsgHTML; public String getErrorMsgHTML() {
} return errorMsgHTML;
}
} }
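A hypothetical use of BatchUpload: summarise a previous batch-import run from its timestamped working directory. The path is illustrative, and the directory is assumed to exist and contain the mapfile/error.txt layout the class expects:

    import org.dspace.app.itemimport.BatchUpload;

    public class BatchUploadReport {
        public static void main(String[] args) {
            // the directory name is the run's start time in milliseconds, as parsed by the class
            BatchUpload upload = new BatchUpload("/dspace/imports/1596550000000");
            System.out.println("Imported " + upload.getItemsImported()
                + " of " + upload.getTotalItems() + " items on " + upload.getDateFormatted());
            if (!upload.isSuccessful()) {
                System.err.println(upload.getErrorMsg());
            }
        }
    }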


@@ -7,7 +7,17 @@
 */
package org.dspace.app.itemimport;

import java.io.File;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.UUID;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.app.itemimport.factory.ItemImportServiceFactory;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.content.Collection;
@@ -21,12 +31,6 @@ import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;

/**
 * Import items into DSpace. The conventional use is upload files by copying
 * them. DSpace writes the item's bitstreams into its assetstore. Metadata is
@@ -47,12 +51,17 @@ public class ItemImportCLITool {

    private static boolean template = false;

    private static final CollectionService collectionService = ContentServiceFactory.getInstance()
                                                                                     .getCollectionService();
    private static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService();
    private static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();

    /**
     * Default constructor
     */
    private ItemImportCLITool() { }

    public static void main(String[] argv) throws Exception {
        Date startTime = new Date();

        int status = 0;
@@ -66,24 +75,24 @@ public class ItemImportCLITool {
        options.addOption("b", "add-bte", false, "add items to DSpace via Biblio-Transformation-Engine (BTE)");
        options.addOption("r", "replace", false, "replace items in mapfile");
        options.addOption("d", "delete", false,
                          "delete items listed in mapfile");
        options.addOption("i", "inputtype", true, "input type in case of BTE import");
        options.addOption("s", "source", true, "source of items (directory)");
        options.addOption("z", "zip", true, "name of zip file");
        options.addOption("c", "collection", true,
                          "destination collection(s) Handle or database ID");
        options.addOption("m", "mapfile", true, "mapfile items in mapfile");
        options.addOption("e", "eperson", true,
                          "email of eperson doing importing");
        options.addOption("w", "workflow", false,
                          "send submission through collection's workflow");
        options.addOption("n", "notify", false,
                          "if sending submissions through the workflow, send notification emails");
        options.addOption("t", "test", false,
                          "test run - do not actually import items");
        options.addOption("p", "template", false, "apply template");
        options.addOption("R", "resume", false,
                          "resume a failed import (add only)");
        options.addOption("q", "quiet", false, "don't display metadata");

        options.addOption("h", "help", false, "help");
@@ -106,15 +115,19 @@ public class ItemImportCLITool {
            HelpFormatter myhelp = new HelpFormatter();

            myhelp.printHelp("ItemImport\n", options);
            System.out
                .println("\nadding items: ItemImport -a -e eperson -c collection -s sourcedir -m mapfile");
            System.out
                .println(
                    "\nadding items from zip file: ItemImport -a -e eperson -c collection -s sourcedir -z " +
                        "filename.zip -m mapfile");
            System.out
                .println("replacing items: ItemImport -r -e eperson -c collection -s sourcedir -m mapfile");
            System.out
                .println("deleting items: ItemImport -d -e eperson -m mapfile");
            System.out
                .println(
                    "If multiple collections are specified, the first collection will be the one that owns the " +
                        "item.");

            System.exit(0);
        }
@@ -155,30 +168,26 @@ public class ItemImportCLITool {
            template = true;
        }

        if (line.hasOption('s')) { // source
            sourcedir = line.getOptionValue('s');
        }

        if (line.hasOption('m')) { // mapfile
            mapfile = line.getOptionValue('m');
        }

        if (line.hasOption('e')) { // eperson
            eperson = line.getOptionValue('e');
        }

        if (line.hasOption('c')) { // collections
            collections = line.getOptionValues('c');
        }

        if (line.hasOption('R')) {
            isResume = true;
            System.out
                .println("**Resume import** - attempting to import items not already imported");
        }

        if (line.hasOption('q')) {
@@ -198,26 +207,26 @@ public class ItemImportCLITool {
        // must have a command set
        if (command == null) {
            System.out
                .println("Error - must run with either add, replace, or remove (run with -h flag for details)");
            System.exit(1);
        } else if ("add".equals(command) || "replace".equals(command)) {
            if (sourcedir == null) {
                System.out
                    .println("Error - a source directory containing items must be set");
                System.out.println(" (run with -h flag for details)");
                System.exit(1);
            }

            if (mapfile == null) {
                System.out
                    .println("Error - a map file to hold importing results must be specified");
                System.out.println(" (run with -h flag for details)");
                System.exit(1);
            }

            if (eperson == null) {
                System.out
                    .println("Error - an eperson to do the importing must be specified");
                System.out.println(" (run with -h flag for details)");
                System.exit(1);
            }
@@ -227,18 +236,19 @@ public class ItemImportCLITool {
                commandLineCollections = false;
            }
        } else if ("add-bte".equals(command)) {
            //Source dir can be null, the user can specify the parameters for his loader in the Spring XML
            // configuration file

            if (mapfile == null) {
                System.out
                    .println("Error - a map file to hold importing results must be specified");
                System.out.println(" (run with -h flag for details)");
                System.exit(1);
            }

            if (eperson == null) {
                System.out
                    .println("Error - an eperson to do the importing must be specified");
                System.out.println(" (run with -h flag for details)");
                System.exit(1);
            }
@@ -250,14 +260,16 @@ public class ItemImportCLITool {
            if (bteInputType == null) {
                System.out
                    .println(
                        "Error - an input type (tsv, csv, ris, endnote, bibtex or any other type you have " +
                            "specified in BTE Spring XML configuration file) must be specified");
                System.out.println(" (run with -h flag for details)");
                System.exit(1);
            }
        } else if ("delete".equals(command)) {
            if (eperson == null) {
                System.out
                    .println("Error - an eperson to do the importing must be specified");
                System.exit(1);
            }
@@ -270,7 +282,7 @@ public class ItemImportCLITool {
        // can only resume for adds
        if (isResume && !"add".equals(command) && !"add-bte".equals(command)) {
            System.out
                .println("Error - resume option only works with the --add or the --add-bte commands");
            System.exit(1);
        }
@@ -280,9 +292,9 @@ public class ItemImportCLITool {
        if (!isResume && "add".equals(command) && myFile.exists()) {
            System.out.println("Error - the mapfile " + mapfile
                                   + " already exists.");
            System.out
                .println("Either delete it or use --resume if attempting to resume an aborted import.");
            System.exit(1);
        }
@@ -330,24 +342,22 @@ public class ItemImportCLITool {
                    // string has a / so it must be a handle - try and resolve
                    // it
                    mycollections.add((Collection) handleService
                        .resolveToObject(c, collections[i]));

                    // resolved, now make sure it's a collection
                    if ((mycollections.get(i) == null)
                        || (mycollections.get(i).getType() != Constants.COLLECTION)) {
                        mycollections.set(i, null);
                    }
                } else if (collections[i] != null) {
                    // not a handle, try and treat it as an integer collection database ID
                    mycollections.set(i, collectionService.find(c, UUID.fromString(collections[i])));
                }

                // was the collection valid?
                if (mycollections.get(i) == null) {
                    throw new IllegalArgumentException("Cannot resolve "
                                                           + collections[i] + " to collection");
                }

                // print progress info
@@ -358,7 +368,7 @@ public class ItemImportCLITool {
                }

                System.out.println(owningPrefix + " Collection: "
                                       + mycollections.get(i).getName());
            }
        } // end of validating collections
@@ -394,11 +404,13 @@ public class ItemImportCLITool {
            try {
                if (zip) {
                    System.gc();
                    System.out.println(
                        "Deleting temporary zip directory: " + myloader.getTempWorkDirFile().getAbsolutePath());
                    myloader.cleanupZipTemp();
                }
            } catch (Exception ex) {
                System.out.println("Unable to delete temporary zip archive location: " + myloader.getTempWorkDirFile()
                                                                                                  .getAbsolutePath());
            }
@@ -409,7 +421,9 @@ public class ItemImportCLITool {
            Date endTime = new Date();

            System.out.println("Started: " + startTime.getTime());
            System.out.println("Ended: " + endTime.getTime());
            System.out.println(
                "Elapsed time: " + ((endTime.getTime() - startTime.getTime()) / 1000) + " secs (" + (endTime
                    .getTime() - startTime.getTime()) + " msecs)");
        }

        System.exit(status);

@@ -11,7 +11,8 @@ import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.services.factory.DSpaceServicesFactory;

/**
 * Abstract factory to get services for the itemimport package, use ItemImportService.getInstance() to retrieve an
 * implementation
 *
 * @author kevinvandevelde at atmire.com
 */
@@ -19,7 +20,8 @@ public abstract class ItemImportServiceFactory {
    public abstract ItemImportService getItemImportService();

    public static ItemImportServiceFactory getInstance() {
        return DSpaceServicesFactory.getInstance().getServiceManager()
                                     .getServiceByName("itemImportServiceFactory", ItemImportServiceFactory.class);
    }
}

@@ -11,7 +11,8 @@ import org.dspace.app.itemimport.service.ItemImportService;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Factory implementation to get services for the itemimport package, use ItemImportService.getInstance() to retrieve
 * an implementation
 *
 * @author kevinvandevelde at atmire.com
 */

@@ -7,16 +7,16 @@
 */
package org.dspace.app.itemimport.service;

import java.io.File;
import java.io.IOException;
import java.util.List;
import javax.mail.MessagingException;

import org.dspace.app.itemimport.BatchUpload;
import org.dspace.content.Collection;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;

/**
 * Import items into DSpace. The conventional use is upload files by copying
 * them. DSpace writes the item's bitstreams into its assetstore. Metadata is
@@ -37,30 +37,32 @@ public interface ItemImportService {
    /**
     * @param c             DSpace Context
     * @param mycollections List of Collections
     * @param sourceDir     source location
     * @param mapFile       map file
     * @param template      whether to use template item
     * @throws Exception if error
     */
    public void addItemsAtomic(Context c, List<Collection> mycollections, String sourceDir, String mapFile,
                               boolean template) throws Exception;

    /**
     * Add items
     *
     * @param c             DSpace Context
     * @param mycollections List of Collections
     * @param sourceDir     source location
     * @param mapFile       map file
     * @param template      whether to use template item
     * @throws Exception if error
     */
    public void addItems(Context c, List<Collection> mycollections,
                         String sourceDir, String mapFile, boolean template) throws Exception;

    /**
     * Unzip a file
     *
     * @param zipfile file
     * @return unzip location
     * @throws IOException if error
@@ -69,6 +71,7 @@ public interface ItemImportService {
    /**
     * Unzip a file to a destination
     *
     * @param zipfile file
     * @param destDir destination directory
     * @return unzip location
@@ -78,7 +81,8 @@ public interface ItemImportService {
    /**
     * Unzip a file in a specific source directory
     *
     * @param sourcedir   source directory
     * @param zipfilename file name
     * @return unzip location
     * @throws IOException if error
@@ -86,18 +90,19 @@ public interface ItemImportService {
    public String unzip(String sourcedir, String zipfilename) throws IOException;

    /**
     *
     * Given a public URL to a zip file that has the Simple Archive Format, this method imports the contents to DSpace
     *
     * @param url              The public URL of the zip file
     * @param owningCollection The owning collection the items will belong to
     * @param collections      The collections the created items will be inserted to, apart from the owning one
     * @param resumeDir        In case of a resume request, the directory that containsthe old mapfile and data
     * @param inputType        The input type of the data (bibtex, csv, etc.), in case of local file
     * @param context          The context
     * @param template         whether to use template item
     * @throws Exception if error
     */
    public void processUIImport(String url, Collection owningCollection, String[] collections, String resumeDir,
                                String inputType, Context context, boolean template) throws Exception;

    /**
     * Since the BTE batch import is done in a new thread we are unable to communicate
@@ -105,16 +110,13 @@ public interface ItemImportService {
     * communication with email instead. Send a success email once the batch
     * import is complete
     *
     * @param context  - the current Context
     * @param eperson  - eperson to send the email to
     * @param fileName - the filepath to the mapfile created by the batch import
     * @throws MessagingException if error
     */
    public void emailSuccessMessage(Context context, EPerson eperson,
                                    String fileName) throws MessagingException;

    /**
     * Since the BTE batch import is done in a new thread we are unable to communicate
@@ -122,37 +124,38 @@ public interface ItemImportService {
     * communication with email instead. Send an error email if the batch
     * import fails
     *
     * @param eperson - EPerson to send the error message to
     * @param error   - the error message
     * @throws MessagingException if error
     */
    public void emailErrorMessage(EPerson eperson, String error)
        throws MessagingException;

    /**
     * Get imports available for a person
     *
     * @param eperson EPerson object
     * @return List of batch uploads
     * @throws Exception if error
     */
    public List<BatchUpload> getImportsAvailable(EPerson eperson)
        throws Exception;

    /**
     * Get import upload directory
     *
     * @param ePerson EPerson object
     * @return directory
     * @throws Exception if error
     */
    public String getImportUploadableDirectory(EPerson ePerson)
        throws Exception;

    /**
     * Delete a batch by ID
     *
     * @param c        DSpace Context
     * @param uploadId identifier
     * @throws Exception if error
     */
@@ -160,18 +163,21 @@ public interface ItemImportService {
    /**
     * Replace items
     *
     * @param c             DSpace Context
     * @param mycollections List of Collections
     * @param sourcedir     source directory
     * @param mapfile       map file
     * @param template      whether to use template item
     * @throws Exception if error
     */
    public void replaceItems(Context c, List<Collection> mycollections, String sourcedir, String mapfile,
                             boolean template) throws Exception;

    /**
     * Delete items via mapfile
     *
     * @param c       DSpace Context
     * @param mapfile map file
     * @throws Exception if error
     */
@@ -179,25 +185,29 @@ public interface ItemImportService {
    /**
     * Add items
     *
     * @param c             DSpace Context
     * @param mycollections List of Collections
     * @param sourcedir     source directory
     * @param mapfile       map file
     * @param template      whether to use template item
     * @param bteInputType  The input type of the data (bibtex, csv, etc.), in case of local file
     * @param workingDir    working directory
     * @throws Exception if error
     */
    public void addBTEItems(Context c, List<Collection> mycollections, String sourcedir, String mapfile,
                            boolean template, String bteInputType, String workingDir) throws Exception;

    /**
     * Get temporary work directory
     *
     * @return directory as string
     */
    public String getTempWorkDir();

    /**
     * Get temporary work directory (as File)
     *
     * @return directory as File
     * @throws java.io.IOException if the directory cannot be created.
     */
@@ -210,18 +220,21 @@ public interface ItemImportService {
    /**
     * Set test flag
     *
     * @param isTest true or false
     */
    public void setTest(boolean isTest);

    /**
     * Set resume flag
     *
     * @param isResume true or false
     */
    public void setResume(boolean isResume);

    /**
     * Set use workflow
     *
     * @param useWorkflow whether to enable workflow
     */
    public void setUseWorkflow(boolean useWorkflow);
@@ -233,6 +246,7 @@ public interface ItemImportService {
    /**
     * Set quiet flag
     *
     * @param isQuiet true or false
     */
    public void setQuiet(boolean isQuiet);

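To make the reorganized service contract above easier to follow, here is a minimal usage sketch. It is not taken from the diff itself: the wrapper class name, the empty collection list, the dry-run flag, and the file paths are illustrative assumptions; only the factory call and the interface methods come from the code above.

import java.util.ArrayList;
import java.util.List;

import org.dspace.app.itemimport.factory.ItemImportServiceFactory;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.content.Collection;
import org.dspace.core.Context;

public class ItemImportUsageSketch {
    public static void main(String[] args) throws Exception {
        Context context = new Context();               // assumes a running DSpace kernel
        List<Collection> targets = new ArrayList<>();  // resolve real target collections here

        ItemImportService importer = ItemImportServiceFactory.getInstance().getItemImportService();
        importer.setTest(true);                        // dry run: validate the archive without importing
        importer.setResume(false);

        // Simple Archive Format source directory and mapfile paths are placeholders
        importer.addItems(context, targets, "/tmp/saf-source", "/tmp/mapfile", false);

        context.complete();
    }
}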
@@ -25,40 +25,37 @@ import org.springframework.beans.factory.annotation.Autowired;
 * based on the existence of bitstreams within the ORIGINAL bundle.
 *
 * @author Kostas Stamatis
 */
public class ItemMarkingAvailabilityBitstreamStrategy implements ItemMarkingExtractor {

    private String availableImageName;
    private String nonAvailableImageName;

    @Autowired(required = true)
    protected ItemService itemService;

    public ItemMarkingAvailabilityBitstreamStrategy() {
    }

    @Override
    public ItemMarkingInfo getItemMarkingInfo(Context context, Item item)
        throws SQLException {
        List<Bundle> bundles = itemService.getBundles(item, "ORIGINAL");
        if (bundles.size() == 0) {
            ItemMarkingInfo markInfo = new ItemMarkingInfo();
            markInfo.setImageName(nonAvailableImageName);

            return markInfo;
        } else {
            Bundle originalBundle = bundles.iterator().next();
            if (originalBundle.getBitstreams().size() == 0) {
                ItemMarkingInfo markInfo = new ItemMarkingInfo();
                markInfo.setImageName(nonAvailableImageName);

                return markInfo;
            } else {
                Bitstream bitstream = originalBundle.getBitstreams().get(0);

                ItemMarkingInfo signInfo = new ItemMarkingInfo();
@@ -66,32 +63,31 @@ public class ItemMarkingAvailabilityBitstreamStrategy implements ItemMarkingExtr
                signInfo.setTooltip(bitstream.getName());

                String bsLink = "";

                bsLink = bsLink + "bitstream/"
                    + item.getHandle() + "/"
                    + bitstream.getSequenceID() + "/";

                try {
                    bsLink = bsLink + Util.encodeBitstreamName(bitstream.getName(), Constants.DEFAULT_ENCODING);
                } catch (UnsupportedEncodingException e) {
                    e.printStackTrace();
                }

                signInfo.setLink(bsLink);

                return signInfo;
            }
        }
    }

    public void setAvailableImageName(String availableImageName) {
        this.availableImageName = availableImageName;
    }

    public void setNonAvailableImageName(String nonAvailableImageName) {
        this.nonAvailableImageName = nonAvailableImageName;
    }
}

@@ -20,31 +20,30 @@ import org.dspace.core.Context;
 * based on the collection the items belong to
 *
 * @author Kostas Stamatis
 */
public class ItemMarkingCollectionStrategy implements ItemMarkingExtractor {

    Map<String, ItemMarkingInfo> mapping = new HashMap<String, ItemMarkingInfo>();

    public ItemMarkingCollectionStrategy() {
    }

    @Override
    public ItemMarkingInfo getItemMarkingInfo(Context context, Item item)
        throws SQLException {
        if (mapping != null) {
            for (Collection collection : item.getCollections()) {
                if (mapping.containsKey(collection.getHandle())) {
                    return mapping.get(collection.getHandle());
                }
            }
        }
        return null;
    }

    public void setMapping(Map<String, ItemMarkingInfo> mapping) {
        this.mapping = mapping;
    }
}

@@ -16,9 +16,8 @@ import org.dspace.core.Context;
 * Interface to abstract the strategy for item signing
 *
 * @author Kostas Stamatis
 */
public interface ItemMarkingExtractor {
    public ItemMarkingInfo getItemMarkingInfo(Context context, Item item)
        throws SQLException;
}

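This interface is the extension point implemented by the strategy classes that follow. As a hedged illustration only (the class name and the withdrawn-item rule are invented for this sketch, not taken from the diff), a custom extractor just maps an Item to an ItemMarkingInfo, or to null for no marking:

import java.sql.SQLException;

import org.dspace.app.itemmarking.ItemMarkingExtractor;
import org.dspace.app.itemmarking.ItemMarkingInfo;
import org.dspace.content.Item;
import org.dspace.core.Context;

public class ItemMarkingWithdrawnStrategy implements ItemMarkingExtractor {

    @Override
    public ItemMarkingInfo getItemMarkingInfo(Context context, Item item)
        throws SQLException {
        // Mark withdrawn items only; every other item gets no marking.
        if (item.isWithdrawn()) {
            ItemMarkingInfo info = new ItemMarkingInfo();
            info.setClassInfo("withdrawn");                  // CSS class consumed by the UI (illustrative)
            info.setTooltip("This item has been withdrawn");
            return info;
        }
        return null;
    }
}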
@@ -11,47 +11,46 @@ package org.dspace.app.itemmarking;
 * Simple DTO to transfer data about the marking info for an item
 *
 * @author Kostas Stamatis
 */
public class ItemMarkingInfo {
    private String imageName;
    private String classInfo;
    private String tooltip;
    private String link;

    public ItemMarkingInfo() {
        super();
    }

    public String getImageName() {
        return imageName;
    }

    public void setImageName(String imageName) {
        this.imageName = imageName;
    }

    public String getTooltip() {
        return tooltip;
    }

    public void setTooltip(String tooltip) {
        this.tooltip = tooltip;
    }

    public String getLink() {
        return link;
    }

    public void setLink(String link) {
        this.link = link;
    }

    public String getClassInfo() {
        return classInfo;
    }

    public void setClassInfo(String classInfo) {
        this.classInfo = classInfo;
    }
}

@@ -24,44 +24,41 @@ import org.springframework.beans.factory.annotation.Autowired;
 * metadata field
 *
 * @author Kostas Stamatis
 */
public class ItemMarkingMetadataStrategy implements ItemMarkingExtractor {

    @Autowired(required = true)
    protected ItemService itemService;

    private String metadataField;
    Map<String, ItemMarkingInfo> mapping = new HashMap<String, ItemMarkingInfo>();

    public ItemMarkingMetadataStrategy() {
    }

    @Override
    public ItemMarkingInfo getItemMarkingInfo(Context context, Item item)
        throws SQLException {
        if (metadataField != null && mapping != null) {
            List<MetadataValue> vals = itemService.getMetadataByMetadataString(item, metadataField);
            if (vals.size() > 0) {
                for (MetadataValue value : vals) {
                    String type = value.getValue();
                    if (mapping.containsKey(type)) {
                        return mapping.get(type);
                    }
                }
            }
        }
        return null;
    }

    public void setMetadataField(String metadataField) {
        this.metadataField = metadataField;
    }

    public void setMapping(Map<String, ItemMarkingInfo> mapping) {
        this.mapping = mapping;
    }
}

@@ -12,80 +12,70 @@ import java.util.LinkedHashMap;
import java.util.Map;

/**
 * Container for UpdateActions
 * Order of actions is very import for correct processing. This implementation
 * supports an iterator that returns the actions in the order in which they are
 * put in. Adding the same action a second time has no effect on this order.
 */
public class ActionManager implements Iterable<UpdateAction> {

    protected Map<Class<? extends UpdateAction>, UpdateAction> registry
        = new LinkedHashMap<Class<? extends UpdateAction>, UpdateAction>();

    /**
     * Get update action
     *
     * @param actionClass UpdateAction class
     * @return instantiation of UpdateAction class
     * @throws InstantiationException if instantiation error
     * @throws IllegalAccessException if illegal access error
     */
    public UpdateAction getUpdateAction(Class<? extends UpdateAction> actionClass)
        throws InstantiationException, IllegalAccessException {
        UpdateAction action = registry.get(actionClass);

        if (action == null) {
            action = actionClass.newInstance();
            registry.put(actionClass, action);
        }

        return action;
    }

    /**
     * @return whether any actions have been registered with this manager
     */
    public boolean hasActions() {
        return !registry.isEmpty();
    }

    /**
     * This implementation guarantees the iterator order is the same as the order
     * in which updateActions have been added
     *
     * @return iterator for UpdateActions
     */
    @Override
    public Iterator<UpdateAction> iterator() {
        return new Iterator<UpdateAction>() {
            private Iterator<Class<? extends UpdateAction>> itr = registry.keySet().iterator();

            @Override
            public boolean hasNext() {
                return itr.hasNext();
            }

            @Override
            public UpdateAction next() {
                return registry.get(itr.next());
            }

            //not supported
            @Override
            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }
}

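Because the class comment above stresses that iteration order follows registration order, here is a short hedged sketch of driving the manager. The two registered classes are the concrete actions appearing elsewhere in this diff; the surrounding main method and the duplicate registration are illustrative, not part of the changed code.

import org.dspace.app.itemupdate.ActionManager;
import org.dspace.app.itemupdate.AddBitstreamsAction;
import org.dspace.app.itemupdate.AddMetadataAction;
import org.dspace.app.itemupdate.UpdateAction;

public class ActionManagerSketch {
    public static void main(String[] args) throws InstantiationException, IllegalAccessException {
        ActionManager actionManager = new ActionManager();
        actionManager.getUpdateAction(AddMetadataAction.class);    // registered first
        actionManager.getUpdateAction(AddBitstreamsAction.class);  // registered second
        actionManager.getUpdateAction(AddMetadataAction.class);    // duplicate add: order is unchanged

        if (actionManager.hasActions()) {
            // Iteration yields AddMetadataAction, then AddBitstreamsAction.
            for (UpdateAction action : actionManager) {
                System.out.println(action.getClass().getSimpleName());
            }
        }
    }
}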
@@ -19,7 +19,11 @@ import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.DCDate;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamFormatService;
import org.dspace.content.service.InstallItemService;
@@ -29,116 +33,106 @@ import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService;

/**
 * Action to add bitstreams listed in item contents file to the item in DSpace
 */
public class AddBitstreamsAction extends UpdateBitstreamsAction {

    protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
    protected BitstreamFormatService bitstreamFormatService = ContentServiceFactory.getInstance()
                                                                                   .getBitstreamFormatService();
    protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();
    protected InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService();

    public AddBitstreamsAction() {
        //empty
    }

    /**
     * Adds bitstreams from the archive as listed in the contents file.
     *
     * @param context      DSpace Context
     * @param itarch       Item Archive
     * @param isTest       test flag
     * @param suppressUndo undo flag
     * @throws IOException              if IO error
     * @throws IllegalArgumentException if arg exception
     * @throws SQLException             if database error
     * @throws AuthorizeException       if authorization error
     * @throws ParseException           if parse error
     */
    @Override
    public void execute(Context context, ItemArchive itarch, boolean isTest,
                        boolean suppressUndo) throws IllegalArgumentException,
        ParseException, IOException, AuthorizeException, SQLException {
        Item item = itarch.getItem();
        File dir = itarch.getDirectory();

        List<ContentsEntry> contents = MetadataUtilities.readContentsFile(new File(dir, ItemUpdate.CONTENTS_FILE));

        if (contents.isEmpty()) {
            ItemUpdate.pr("Contents is empty - no bitstreams to add");
            return;
        }

        ItemUpdate.pr("Contents bitstream count: " + contents.size());

        String[] files = dir.list(ItemUpdate.fileFilter);
        List<String> fileList = new ArrayList<String>();
        for (String filename : files) {
            fileList.add(filename);
            ItemUpdate.pr("file: " + filename);
        }

        for (ContentsEntry ce : contents) {
            //validate match to existing file in archive
            if (!fileList.contains(ce.filename)) {
                throw new IllegalArgumentException("File listed in contents is missing: " + ce.filename);
            }
        }
        int bitstream_bundles_updated = 0;

        //now okay to add
        for (ContentsEntry ce : contents) {
            String targetBundleName = addBitstream(context, itarch, item, dir, ce, suppressUndo, isTest);
            if (!targetBundleName.equals("")
                && !targetBundleName.equals("THUMBNAIL")
                && !targetBundleName.equals("TEXT")) {
                bitstream_bundles_updated++;
            }
        }

        if (alterProvenance && bitstream_bundles_updated > 0) {
            DtoMetadata dtom = DtoMetadata.create("dc.description.provenance", "en", "");

            String append = ". Added " + Integer.toString(bitstream_bundles_updated)
                + " bitstream(s) on " + DCDate.getCurrent() + " : "
                + installItemService.getBitstreamProvenanceMessage(context, item);
            MetadataUtilities.appendMetadata(context, item, dtom, false, append);
        }
    }

    /**
     * Add bitstream
     *
     * @param context      DSpace Context
     * @param itarch       Item Archive
     * @param item         DSpace Item
     * @param dir          directory
     * @param ce           contents entry for bitstream
     * @param suppressUndo undo flag
     * @param isTest       test flag
     * @return bundle name
     * @throws IOException              if IO error
     * @throws IllegalArgumentException if arg exception
     * @throws SQLException             if database error
     * @throws AuthorizeException       if authorization error
     * @throws ParseException           if parse error
     */
    protected String addBitstream(Context context, ItemArchive itarch, Item item, File dir,
                                  ContentsEntry ce, boolean suppressUndo, boolean isTest)
        throws IOException, IllegalArgumentException, SQLException, AuthorizeException, ParseException {
        ItemUpdate.pr("contents entry for bitstream: " + ce.toString());
        File f = new File(dir, ce.filename);

        // get an input stream
        BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f));
@@ -146,84 +140,69 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
        Bitstream bs = null;
        String newBundleName = ce.bundlename;

        if (ce.bundlename == null) { // should be required but default convention established
            if (ce.filename.equals("license.txt")) {
                newBundleName = "LICENSE";
            } else {
                newBundleName = "ORIGINAL";
            }
        }
        ItemUpdate.pr(" Bitstream " + ce.filename + " to be added to bundle: " + newBundleName);

        if (!isTest) {
            // find the bundle
            List<Bundle> bundles = itemService.getBundles(item, newBundleName);
            Bundle targetBundle = null;

            if (bundles.size() < 1) {
                // not found, create a new one
                targetBundle = bundleService.create(context, item, newBundleName);
            } else {
                //verify bundle + name are not duplicates
                for (Bundle b : bundles) {
                    List<Bitstream> bitstreams = b.getBitstreams();
                    for (Bitstream bsm : bitstreams) {
                        if (bsm.getName().equals(ce.filename)) {
                            throw new IllegalArgumentException("Duplicate bundle + filename cannot be added: "
                                                                   + b.getName() + " + " + bsm.getName());
                        }
                    }
                }

                // select first bundle
                targetBundle = bundles.iterator().next();
            }

            bs = bitstreamService.create(context, targetBundle, bis);
            bs.setName(context, ce.filename);

            // Identify the format
            // FIXME - guessing format guesses license.txt incorrectly as a text file format!
            BitstreamFormat fmt = bitstreamFormatService.guessFormat(context, bs);
            bitstreamService.setFormat(context, bs, fmt);

            if (ce.description != null) {
                bs.setDescription(context, ce.description);
            }

            if ((ce.permissionsActionId != -1) && (ce.permissionsGroupName != null)) {
                Group group = groupService.findByName(context, ce.permissionsGroupName);

                if (group != null) {
                    authorizeService.removeAllPolicies(context, bs); // remove the default policy
                    authorizeService.createResourcePolicy(context, bs, group, null, ce.permissionsActionId, null);
                }
            }

            //update after all changes are applied
            bitstreamService.update(context, bs);

            if (!suppressUndo) {
                itarch.addUndoDeleteContents(bs.getID());
            }
            return targetBundle.getName();
        }
        return "";
    }
}


@@ -11,119 +11,107 @@ import java.sql.SQLException;
import java.util.List; import java.util.List;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataValue;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.MetadataField; import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema; import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataFieldService; import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService; import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context; import org.dspace.core.Context;
/** /**
* Action to add metadata to item * Action to add metadata to item
*
*/ */
public class AddMetadataAction extends UpdateMetadataAction { public class AddMetadataAction extends UpdateMetadataAction {
protected MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance().getMetadataSchemaService(); protected MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance()
.getMetadataSchemaService();
protected MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService(); protected MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();
/** /**
* Adds metadata specified in the source archive * Adds metadata specified in the source archive
* *
* @param context DSpace Context * @param context DSpace Context
* @param itarch item archive * @param itarch item archive
* @param isTest test flag * @param isTest test flag
* @param suppressUndo undo flag * @param suppressUndo undo flag
* @throws AuthorizeException if authorization error * @throws AuthorizeException if authorization error
* @throws SQLException if database error * @throws SQLException if database error
*/ */
@Override @Override
public void execute(Context context, ItemArchive itarch, boolean isTest, public void execute(Context context, ItemArchive itarch, boolean isTest,
boolean suppressUndo) throws AuthorizeException, SQLException boolean suppressUndo) throws AuthorizeException, SQLException {
{ Item item = itarch.getItem();
Item item = itarch.getItem(); String dirname = itarch.getDirectoryName();
String dirname = itarch.getDirectoryName();
for (DtoMetadata dtom : itarch.getMetadataFields()) for (DtoMetadata dtom : itarch.getMetadataFields()) {
{ for (String f : targetFields) {
for (String f : targetFields) if (dtom.matches(f, false)) {
{ // match against metadata for this field/value in repository
if (dtom.matches(f, false)) // qualifier must be strictly matched, possibly null
{ List<MetadataValue> ardcv = null;
// match against metadata for this field/value in repository ardcv = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
// qualifier must be strictly matched, possibly null
List<MetadataValue> ardcv = null;
ardcv = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
boolean found = false; boolean found = false;
for (MetadataValue dcv : ardcv) for (MetadataValue dcv : ardcv) {
{ if (dcv.getValue().equals(dtom.value)) {
if (dcv.getValue().equals(dtom.value)) found = true;
{ break;
found = true; }
break; }
}
}
if (found) if (found) {
{ ItemUpdate.pr("Warning: No new metadata found to add to item " + dirname
ItemUpdate.pr("Warning: No new metadata found to add to item " + dirname + " for element " + f);
+ " for element " + f); } else {
} if (isTest) {
else ItemUpdate.pr("Metadata to add: " + dtom.toString());
{ //validity tests that would occur in actual processing
if (isTest) // If we're just test the import, let's check that the actual metadata field exists.
{ MetadataSchema foundSchema = metadataSchemaService.find(context, dtom.schema);
ItemUpdate.pr("Metadata to add: " + dtom.toString());
//validity tests that would occur in actual processing
// If we're just test the import, let's check that the actual metadata field exists.
MetadataSchema foundSchema = metadataSchemaService.find(context, dtom.schema);
if (foundSchema == null) if (foundSchema == null) {
{ ItemUpdate.pr("ERROR: schema '"
ItemUpdate.pr("ERROR: schema '" + dtom.schema + "' was not found in the registry; found on item " +
+ dtom.schema + "' was not found in the registry; found on item " + dirname); dirname);
} } else {
else MetadataField foundField = metadataFieldService
{ .findByElement(context, foundSchema, dtom.element, dtom.qualifier);
MetadataField foundField = metadataFieldService.findByElement(context, foundSchema, dtom.element, dtom.qualifier);
if (foundField == null) if (foundField == null) {
{ ItemUpdate.pr("ERROR: Metadata field: '" + dtom.schema + "." + dtom.element + "."
ItemUpdate.pr("ERROR: Metadata field: '" + dtom.schema + "." + dtom.element + "." + dtom.qualifier + "' not found in registry; found on item " +
+ dtom.qualifier + "' not found in registry; found on item " + dirname); dirname);
} }
} }
} } else {
else itemService
{ .addMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language,
itemService.addMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language, dtom.value); dtom.value);
ItemUpdate.pr("Metadata added: " + dtom.toString()); ItemUpdate.pr("Metadata added: " + dtom.toString());
if (!suppressUndo) if (!suppressUndo) {
{ //itarch.addUndoDtom(dtom);
//itarch.addUndoDtom(dtom); //ItemUpdate.pr("Undo metadata: " + dtom);
//ItemUpdate.pr("Undo metadata: " + dtom);
// add all as a replace record to be preceded by delete // add all as a replace record to be preceded by delete
for (MetadataValue dcval : ardcv) for (MetadataValue dcval : ardcv) {
{
MetadataField metadataField = dcval.getMetadataField(); MetadataField metadataField = dcval.getMetadataField();
MetadataSchema metadataSchema = metadataField.getMetadataSchema(); MetadataSchema metadataSchema = metadataField.getMetadataSchema();
itarch.addUndoMetadataField(DtoMetadata.create(metadataSchema.getName(), metadataField.getElement(), itarch.addUndoMetadataField(
metadataField.getQualifier(), dcval.getLanguage(), dcval.getValue())); DtoMetadata.create(metadataSchema.getName(), metadataField.getElement(),
} metadataField.getQualifier(), dcval.getLanguage(),
dcval.getValue()));
}
} }
} }
} }
break; // don't need to check if this field matches any other target fields break; // don't need to check if this field matches any other target fields
} }
} }
} }
} }
} }
View File
@@ -7,55 +7,49 @@
 */
package org.dspace.app.itemupdate;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

import org.dspace.content.Bitstream;

/**
 * Filter interface to be used by ItemUpdate
 * to determine which bitstreams in an Item
 * acceptable for removal.
 */
public abstract class BitstreamFilter {

    protected Properties props = null;

    /**
     * The filter method
     *
     * @param bitstream Bitstream
     * @return whether the bitstream matches the criteria
     * @throws BitstreamFilterException if filter error
     */
    public abstract boolean accept(Bitstream bitstream) throws BitstreamFilterException;

    /**
     * @param filepath - The complete path for the properties file
     * @throws IOException if IO error
     */
    public void initProperties(String filepath)
        throws IOException {
        props = new Properties();

        InputStream in = null;

        try {
            in = new FileInputStream(filepath);
            props.load(in);
        } finally {
            if (in != null) {
                in.close();
            }
        }
    }
}
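The abstract class above is the extension point for bitstream deletion: subclasses read their configuration from the Properties loaded by initProperties() and decide per bitstream. A hypothetical example, not part of this changeset; the class name, the "minsize" property key, and the use of Bitstream.getSizeBytes() are assumptions for illustration.

package org.dspace.app.itemupdate;

import org.dspace.content.Bitstream;

// Hypothetical filter: accepts bitstreams larger than a configured size.
// The "minsize" property key is an assumed, illustrative configuration name.
public class LargeBitstreamFilter extends BitstreamFilter {
    @Override
    public boolean accept(Bitstream bitstream) throws BitstreamFilterException {
        String min = props.getProperty("minsize");
        if (min == null) {
            throw new BitstreamFilterException("Property 'minsize' not found.");
        }
        return bitstream.getSizeBytes() > Long.parseLong(min);
    }
}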
View File
@@ -14,55 +14,44 @@ import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;

/**
 * BitstreamFilter implementation to filter by bundle name
 */
public class BitstreamFilterByBundleName extends BitstreamFilter {

    protected String bundleName;

    public BitstreamFilterByBundleName() {
        //empty
    }

    /**
     * Filter bitstream based on bundle name found in properties file
     *
     * @param bitstream Bitstream
     * @return whether bitstream is in bundle
     * @throws BitstreamFilterException if filter error
     */
    @Override
    public boolean accept(Bitstream bitstream)
        throws BitstreamFilterException {
        if (bundleName == null) {
            bundleName = props.getProperty("bundle");
            if (bundleName == null) {
                throw new BitstreamFilterException("Property 'bundle' not found.");
            }
        }

        try {
            List<Bundle> bundles = bitstream.getBundles();
            for (Bundle b : bundles) {
                if (b.getName().equals(bundleName)) {
                    return true;
                }
            }
        } catch (SQLException e) {
            throw new BitstreamFilterException(e);
        }
        return false;
    }
}
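This filter is driven entirely by the properties file passed to initProperties(); a hedged usage sketch follows, with the file path as a placeholder.

package org.dspace.app.itemupdate;

// Hedged usage sketch; the properties file path is illustrative.
// The file would contain a single entry such as:  bundle = LICENSE
public class BundleFilterUsageSketch {
    public static void main(String[] args) throws Exception {
        BitstreamFilterByBundleName filter = new BitstreamFilterByBundleName();
        filter.initProperties("/path/to/filter.properties"); // loads the Properties
        // filter.accept(bitstream) reads the 'bundle' key on first use
        // and returns true when the bitstream sits in a bundle of that name.
    }
}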
View File
@@ -7,21 +7,20 @@
 */
package org.dspace.app.itemupdate;

import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.dspace.content.Bitstream;

/**
 * BitstreamFilter implementation to filter by filename pattern
 */
public class BitstreamFilterByFilename extends BitstreamFilter {

    protected Pattern pattern;
    protected String filenameRegex;

    public BitstreamFilterByFilename() {
        //empty
    }
@@ -34,13 +33,10 @@ public class BitstreamFilterByFilename extends BitstreamFilter {
     * @throws BitstreamFilterException if filter error
     */
    @Override
    public boolean accept(Bitstream bitstream) throws BitstreamFilterException {
        if (filenameRegex == null) {
            filenameRegex = props.getProperty("filename");
            if (filenameRegex == null) {
                throw new BitstreamFilterException("BitstreamFilter property 'filename' not found.");
            }
            pattern = Pattern.compile(filenameRegex);
View File
@@ -8,30 +8,27 @@
package org.dspace.app.itemupdate;

/**
 * Exception class for BitstreamFilters
 */
public class BitstreamFilterException extends Exception {
    private static final long serialVersionUID = 1L;

    public BitstreamFilterException() {
    }

    /**
     * @param msg exception message
     */
    public BitstreamFilterException(String msg) {
        super(msg);
    }

    /**
     * @param e exception
     */
    public BitstreamFilterException(Exception e) {
        super(e);
    }

}
View File
@@ -8,148 +8,124 @@
package org.dspace.app.itemupdate;

import java.text.ParseException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.dspace.core.Constants;

/**
 * Holds the elements of a line in the Contents Entry file
 *
 * Based on private methods in ItemImport
 *
 * Lacking a spec or full documentation for the file format,
 * it looks from the source code that the ordering or elements is not fixed
 *
 * e.g.:
 * {@code
 * 48217870-MIT.pdf\tbundle: bundlename\tpermissions: -r 'MIT Users'\tdescription: Full printable version (MIT only)
 * permissions: -[r|w] ['group name']
 * description: <the description of the file>
 * }
 */
public class ContentsEntry {
    public static final String HDR_BUNDLE = "bundle:";
    public static final String HDR_PERMISSIONS = "permissions:";
    public static final String HDR_DESCRIPTION = "description:";

    public static final Pattern permissionsPattern = Pattern.compile("-([rw])\\s*'?([^']+)'?");

    final String filename;
    final String bundlename;
    final String permissionsGroupName;
    final int permissionsActionId;
    final String description;

    protected ContentsEntry(String filename,
                            String bundlename,
                            int permissionsActionId,
                            String permissionsGroupName,
                            String description) {
        this.filename = filename;
        this.bundlename = bundlename;
        this.permissionsActionId = permissionsActionId;
        this.permissionsGroupName = permissionsGroupName;
        this.description = description;
    }

    /**
     * Factory method parses a line from the Contents Entry file
     *
     * @param line line as string
     * @return the parsed ContentsEntry object
     * @throws ParseException if parse error
     */
    public static ContentsEntry parse(String line)
        throws ParseException {
        String[] ar = line.split("\t");
        ItemUpdate.pr("ce line split: " + ar.length);

        String[] arp = new String[4];
        arp[0] = ar[0];   //bitstream name doesn't have header and is always first

        String groupName = null;
        int actionId = -1;

        if (ar.length > 1) {
            for (int i = 1; i < ar.length; i++) {
                ItemUpdate.pr("ce " + i + " : " + ar[i]);
                if (ar[i].startsWith(HDR_BUNDLE)) {
                    arp[1] = ar[i].substring(HDR_BUNDLE.length()).trim();

                } else if (ar[i].startsWith(HDR_PERMISSIONS)) {
                    arp[2] = ar[i].substring(HDR_PERMISSIONS.length()).trim();

                    // parse into actionId and group name
                    Matcher m = permissionsPattern.matcher(arp[2]);
                    if (m.matches()) {
                        String action = m.group(1);  //
                        if (action.equals("r")) {
                            actionId = Constants.READ;
                        } else if (action.equals("w")) {
                            actionId = Constants.WRITE;
                        }

                        groupName = m.group(2).trim();
                    }

                } else if (ar[i].startsWith(HDR_DESCRIPTION)) {
                    arp[3] = ar[i].substring(HDR_DESCRIPTION.length()).trim();

                } else {
                    throw new ParseException("Unknown text in contents file: " + ar[i], 0);
                }
            }
        }
        return new ContentsEntry(arp[0], arp[1], actionId, groupName, arp[3]);
    }

    public String toString() {
        StringBuilder sb = new StringBuilder(filename);
        if (bundlename != null) {
            sb.append(HDR_BUNDLE).append(" ").append(bundlename);
        }

        if (permissionsGroupName != null) {
            sb.append(HDR_PERMISSIONS);
            if (permissionsActionId == Constants.READ) {
                sb.append(" -r ");
            } else if (permissionsActionId == Constants.WRITE) {
                sb.append(" -w ");
            }
            sb.append(permissionsGroupName);
        }

        if (description != null) {
            sb.append(HDR_DESCRIPTION).append(" ").append(description);
        }

        return sb.toString();
    }
}
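The class Javadoc above documents the tab-separated contents line format; a minimal sketch of parsing one such line follows. The filename, bundle, group, and description values are placeholders.

package org.dspace.app.itemupdate;

// Hedged sketch: parse one contents line as documented in the ContentsEntry Javadoc.
// All values in the sample line are illustrative placeholders.
public class ContentsEntrySketch {
    public static void main(String[] args) throws Exception {
        String line = "paper.pdf\tbundle: ORIGINAL\tpermissions: -r 'Staff'\tdescription: Printable version";
        ContentsEntry ce = ContentsEntry.parse(line);
        System.out.println(ce);   // echoes filename, bundle, permissions and description
    }
}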
View File
@@ -14,99 +14,81 @@ import java.text.ParseException;
import java.util.List;

import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.DCDate;
import org.dspace.content.Item;
import org.dspace.core.Context;

/**
 * Action to delete bitstreams
 *
 * Undo not supported for this UpdateAction
 *
 * Derivatives of the bitstream to be deleted are not also deleted
 */
public class DeleteBitstreamsAction extends UpdateBitstreamsAction {
    /**
     * Delete bitstream from item
     *
     * @param context      DSpace Context
     * @param itarch       item archive
     * @param isTest       test flag
     * @param suppressUndo undo flag
     * @throws IOException              if IO error
     * @throws IllegalArgumentException if arg exception
     * @throws SQLException             if database error
     * @throws AuthorizeException       if authorization error
     * @throws ParseException           if parse error
     */
    @Override
    public void execute(Context context, ItemArchive itarch, boolean isTest,
                        boolean suppressUndo) throws IllegalArgumentException, IOException,
        SQLException, AuthorizeException, ParseException {
        File f = new File(itarch.getDirectory(), ItemUpdate.DELETE_CONTENTS_FILE);
        if (!f.exists()) {
            ItemUpdate.pr("Warning: Delete_contents file for item " + itarch.getDirectoryName() + " not found.");
        } else {
            List<String> list = MetadataUtilities.readDeleteContentsFile(f);
            if (list.isEmpty()) {
                ItemUpdate.pr("Warning: empty delete_contents file for item " + itarch.getDirectoryName());
            } else {
                for (String id : list) {
                    try {
                        Bitstream bs = bitstreamService.findByIdOrLegacyId(context, id);
                        if (bs == null) {
                            ItemUpdate.pr("Bitstream not found by id: " + id);
                        } else {
                            List<Bundle> bundles = bs.getBundles();
                            for (Bundle b : bundles) {
                                if (isTest) {
                                    ItemUpdate.pr("Delete bitstream with id = " + id);
                                } else {
                                    bundleService.removeBitstream(context, b, bs);
                                    ItemUpdate.pr("Deleted bitstream with id = " + id);
                                }
                            }

                            if (alterProvenance) {
                                DtoMetadata dtom = DtoMetadata.create("dc.description.provenance", "en", "");

                                String append = "Bitstream " + bs.getName() + " deleted on " + DCDate
                                    .getCurrent() + "; ";
                                Item item = bundles.iterator().next().getItems().iterator().next();
                                ItemUpdate.pr("Append provenance with: " + append);

                                if (!isTest) {
                                    MetadataUtilities.appendMetadata(context, item, dtom, false, append);
                                }
                            }
                        }
                    } catch (SQLException e) {
                        ItemUpdate.pr("Error finding bitstream from id: " + id + " : " + e.toString());
                    }
                }
            }
        }
    }
}
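This action is driven by a delete_contents file in the item directory, one bitstream identifier per line, which MetadataUtilities.readDeleteContentsFile() loads. A hedged sketch of producing such a file follows; the directory path is a placeholder and the UUID is randomly generated for illustration.

package org.dspace.app.itemupdate;

import java.io.File;
import java.io.PrintWriter;
import java.util.UUID;

// Hedged sketch: write a delete-contents file into an item directory of the source archive.
// The directory path is a placeholder; each line holds one bitstream UUID
// (legacy integer IDs are also accepted by findByIdOrLegacyId above).
public class DeleteContentsSketch {
    public static void main(String[] args) throws Exception {
        File itemDir = new File("/path/to/archive/item_000");
        try (PrintWriter pw = new PrintWriter(new File(itemDir, ItemUpdate.DELETE_CONTENTS_FILE))) {
            pw.println(UUID.randomUUID());
        }
    }
}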
View File
@@ -14,115 +14,104 @@ import java.util.ArrayList;
import java.util.List;

import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.DCDate;
import org.dspace.content.Item;
import org.dspace.core.Context;

/**
 * Action to delete bitstreams using a specified filter implementing BitstreamFilter
 * Derivatives for the target bitstreams are not deleted.
 *
 * The dc.description.provenance field is amended to reflect the deletions
 *
 * Note: Multiple filters are impractical if trying to manage multiple properties files
 * in a commandline environment
 */
public class DeleteBitstreamsByFilterAction extends UpdateBitstreamsAction {

    protected BitstreamFilter filter;

    /**
     * Set filter
     *
     * @param filter BitstreamFilter
     */
    public void setBitstreamFilter(BitstreamFilter filter) {
        this.filter = filter;
    }

    /**
     * Get filter
     *
     * @return filter
     */
    public BitstreamFilter getBitstreamFilter() {
        return filter;
    }

    /**
     * Delete bitstream
     *
     * @param context      DSpace Context
     * @param itarch       item archive
     * @param isTest       test flag
     * @param suppressUndo undo flag
     * @throws IOException              if IO error
     * @throws SQLException             if database error
     * @throws AuthorizeException       if authorization error
     * @throws ParseException           if parse error
     * @throws BitstreamFilterException if filter error
     */
    @Override
    public void execute(Context context, ItemArchive itarch, boolean isTest,
                        boolean suppressUndo) throws AuthorizeException,
        BitstreamFilterException, IOException, ParseException, SQLException {

        List<String> deleted = new ArrayList<String>();

        Item item = itarch.getItem();
        List<Bundle> bundles = item.getBundles();

        for (Bundle b : bundles) {
            List<Bitstream> bitstreams = b.getBitstreams();
            String bundleName = b.getName();

            for (Bitstream bs : bitstreams) {
                if (filter.accept(bs)) {
                    if (isTest) {
                        ItemUpdate.pr("Delete from bundle " + bundleName + " bitstream " + bs.getName()
                                          + " with id = " + bs.getID());
                    } else {
                        //provenance is not maintained for derivative bitstreams
                        if (!bundleName.equals("THUMBNAIL") && !bundleName.equals("TEXT")) {
                            deleted.add(bs.getName());
                        }
                        bundleService.removeBitstream(context, b, bs);
                        ItemUpdate.pr("Deleted " + bundleName + " bitstream " + bs.getName()
                                          + " with id = " + bs.getID());
                    }
                }
            }
        }

        if (alterProvenance && !deleted.isEmpty()) {
            StringBuilder sb = new StringBuilder(" Bitstreams deleted on ");
            sb.append(DCDate.getCurrent()).append(": ");

            for (String s : deleted) {
                sb.append(s).append(", ");
            }

            DtoMetadata dtom = DtoMetadata.create("dc.description.provenance", "en", "");
            ItemUpdate.pr("Append provenance with: " + sb.toString());

            if (!isTest) {
                MetadataUtilities.appendMetadata(context, item, dtom, false, sb.toString());
            }
        }
    }
}
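Wiring a concrete filter into this action mirrors what ItemUpdate does when bitstream deletion by filter is requested. A hedged sketch follows; obtaining a real Context and ItemArchive is out of scope here and the helper class name is a placeholder.

package org.dspace.app.itemupdate;

import org.dspace.core.Context;

// Hedged sketch: attach a filter, then run the action in test mode.
// Building the Context and ItemArchive is omitted; callers would supply real ones.
public class DeleteByFilterSketch {
    public static void run(Context context, ItemArchive itarch) throws Exception {
        DeleteBitstreamsByFilterAction action = new DeleteBitstreamsByFilterAction();
        action.setBitstreamFilter(new DerivativeTextBitstreamFilter());
        action.execute(context, itarch, true, true);   // isTest=true: dry run; suppressUndo=true
    }
}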
View File
@@ -12,60 +12,54 @@ import java.text.ParseException;
import java.util.List;

import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataValue;
import org.dspace.core.Context;

/**
 * Action to delete metadata
 */
public class DeleteMetadataAction extends UpdateMetadataAction {

    /**
     * Delete metadata from item
     *
     * @param context      DSpace Context
     * @param itarch       Item Archive
     * @param isTest       test flag
     * @param suppressUndo undo flag
     * @throws SQLException       if database error
     * @throws AuthorizeException if authorization error
     * @throws ParseException     if parse error
     */
    @Override
    public void execute(Context context, ItemArchive itarch, boolean isTest,
                        boolean suppressUndo) throws AuthorizeException, ParseException, SQLException {
        Item item = itarch.getItem();
        for (String f : targetFields) {
            DtoMetadata dummy = DtoMetadata.create(f, Item.ANY, "");
            List<MetadataValue> ardcv = itemService.getMetadataByMetadataString(item, f);

            ItemUpdate.pr("Metadata to be deleted: ");
            for (MetadataValue dcv : ardcv) {
                ItemUpdate.pr("  " + MetadataUtilities.getDCValueString(dcv));
            }

            if (!isTest) {
                if (!suppressUndo) {
                    for (MetadataValue dcv : ardcv) {
                        MetadataField metadataField = dcv.getMetadataField();
                        MetadataSchema metadataSchema = metadataField.getMetadataSchema();
                        itarch.addUndoMetadataField(
                            DtoMetadata.create(metadataSchema.getName(), metadataField.getElement(),
                                               metadataField.getQualifier(), dcv.getLanguage(), dcv.getValue()));
                    }
                }

                itemService.clearMetadata(context, item, dummy.schema, dummy.element, dummy.qualifier, Item.ANY);
            }
        }
    }
}
View File
@@ -10,15 +10,13 @@ package org.dspace.app.itemupdate;
import java.util.Properties;

/**
 * Bitstream filter to delete from TEXT bundle
 */
public class DerivativeTextBitstreamFilter extends BitstreamFilterByBundleName {

    public DerivativeTextBitstreamFilter() {
        props = new Properties();
        props.setProperty("bundle", "TEXT");
    }

}
View File
@@ -8,152 +8,131 @@
package org.dspace.app.itemupdate;

import java.text.ParseException;

import org.dspace.content.Item;

/**
 * A data transfer object class enhancement of org.dspace.content.DCValue, which is deprecated
 * Name intended to not conflict with DSpace API classes for similar concepts but not usable in this context
 *
 * Adds some utility methods
 *
 * Really not at all general enough but supports Dublin Core and the compound form notation {@code <schema>
 * .<element>[.<qualifier>]}
 *
 * Does not support wildcard for qualifier
 */
class DtoMetadata {
    final String schema;
    final String element;
    final String qualifier;
    final String language;
    final String value;

    protected DtoMetadata(String schema, String element, String qualifier, String language, String value) {
        this.schema = schema;
        this.element = element;
        this.qualifier = qualifier;
        this.language = language;
        this.value = value;
    }

    /**
     * Factory method
     *
     * @param schema    not null, not empty - 'dc' is the standard case
     * @param element   not null, not empty
     * @param qualifier null; don't allow empty string or * indicating 'any'
     * @param language  null or empty
     * @param value     value
     * @return DtoMetadata object
     * @throws IllegalArgumentException if arg error
     */
    public static DtoMetadata create(String schema,
                                     String element,
                                     String qualifier,
                                     String language,
                                     String value)
        throws IllegalArgumentException {
        if ((qualifier != null) && (qualifier.equals(Item.ANY) || qualifier.equals(""))) {
            throw new IllegalArgumentException("Invalid qualifier: " + qualifier);
        }
        return new DtoMetadata(schema, element, qualifier, language, value);
    }

    /**
     * Factory method to create metadata object
     *
     * @param compoundForm of the form <schema>.<element>[.<qualifier>]
     * @param language     null or empty
     * @param value        value
     * @throws ParseException           if parse error
     * @throws IllegalArgumentException if arg error
     */
    public static DtoMetadata create(String compoundForm, String language, String value)
        throws ParseException, IllegalArgumentException {
        String[] ar = MetadataUtilities.parseCompoundForm(compoundForm);

        String qual = null;
        if (ar.length > 2) {
            qual = ar[2];
        }

        return create(ar[0], ar[1], qual, language, value);
    }

    /**
     * Determine if this metadata field matches the specified type:
     * schema.element or schema.element.qualifier
     *
     * @param compoundForm of the form <schema>.<element>[.<qualifier>|.*]
     * @param wildcard     allow wildcards in compoundForm param
     * @return whether matches
     */
    public boolean matches(String compoundForm, boolean wildcard) {
        String[] ar = compoundForm.split("\\s*\\.\\s*"); //MetadataUtilities.parseCompoundForm(compoundForm);

        if ((ar.length < 2) || (ar.length > 3)) {
            return false;
        }

        if (!this.schema.equals(ar[0]) || !this.element.equals(ar[1])) {
            return false;
        }

        if (ar.length == 2) {
            if (this.qualifier != null) {
                return false;
            }
        }

        if (ar.length == 3) {
            if (this.qualifier == null) {
                return false;
            }
            if (wildcard && ar[2].equals(Item.ANY)) {
                return true;
            }
            if (!this.qualifier.equals(ar[2])) {
                return false;
            }
        }

        return true;
    }

    public String toString() {
        String s = "\tSchema: " + schema + " Element: " + element;
        if (qualifier != null) {
            s += " Qualifier: " + qualifier;
        }
        s += " Language: " + ((language == null) ? "[null]" : language);
        s += " Value: " + value;

        return s;
    }

    public String getValue() {
        return value;
    }
}
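The matches() contract above (compound form, optional wildcard qualifier) is easiest to see with a few calls; a minimal sketch follows, with placeholder field values.

package org.dspace.app.itemupdate;

// Hedged sketch of DtoMetadata.matches() behaviour; the field values are placeholders.
public class DtoMetadataMatchSketch {
    public static void main(String[] args) {
        DtoMetadata unqualified = DtoMetadata.create("dc", "title", null, null, "A title");
        DtoMetadata qualified = DtoMetadata.create("dc", "description", "abstract", "en", "An abstract");

        System.out.println(unqualified.matches("dc.title", false));             // true
        System.out.println(unqualified.matches("dc.title.alternative", false)); // false: field has no qualifier
        System.out.println(qualified.matches("dc.description", false));         // false: field has a qualifier
        System.out.println(qualified.matches("dc.description.*", true));        // true: wildcard qualifier allowed
    }
}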
View File
@@ -10,12 +10,11 @@ package org.dspace.app.itemupdate;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.sql.SQLException;
@@ -23,16 +22,15 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;

import org.apache.logging.log4j.Logger;
import org.dspace.app.util.LocalSchemaFilenameFilter;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
@@ -40,20 +38,18 @@ import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.w3c.dom.Document;

/**
 * Encapsulates the Item in the context of the DSpace Archive Format
 */
public class ItemArchive {
    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemArchive.class);

    public static final String DUBLIN_CORE_XML = "dublin_core.xml";

    protected static DocumentBuilder builder = null;
    protected Transformer transformer = null;
@@ -70,312 +66,278 @@ public class ItemArchive {
    protected HandleService handleService;
    protected ItemService itemService;

    //constructors
    protected ItemArchive() {
        handleService = HandleServiceFactory.getInstance().getHandleService();
        itemService = ContentServiceFactory.getInstance().getItemService();
    }

    /**
     * factory method
     *
     * Minimal requirements for dublin_core.xml for this application
     * is the presence of dc.identifier.uri
     * which must contain the handle for the item
     *
     * @param context   - The DSpace context
     * @param dir       - The directory File in the source archive
     * @param itemField - The metadata field in which the Item identifier is located
     *                  if null, the default is the handle in the dc.identifier.uri field
     * @return ItemArchive object
     * @throws Exception if error
     */
    public static ItemArchive create(Context context, File dir, String itemField)
        throws Exception {
        ItemArchive itarch = new ItemArchive();
        itarch.dir = dir;
        itarch.dirname = dir.getName();

        InputStream is = null;
        try {
            is = new FileInputStream(new File(dir, DUBLIN_CORE_XML));
            itarch.dtomList = MetadataUtilities.loadDublinCore(getDocumentBuilder(), is);

            //The code to search for local schema files was copied from org.dspace.app.itemimport
            // .ItemImportServiceImpl.java
            File file[] = dir.listFiles(new LocalSchemaFilenameFilter());
            for (int i = 0; i < file.length; i++) {
                is = new FileInputStream(file[i]);
                itarch.dtomList.addAll(MetadataUtilities.loadDublinCore(getDocumentBuilder(), is));
            }
        } finally {
            if (is != null) {
                is.close();
            }
        }

        ItemUpdate.pr("Loaded metadata with " + itarch.dtomList.size() + " fields");

        if (itemField == null) {
            itarch.item = itarch.itemFromHandleInput(context);   // sets the item instance var and seeds the undo list
        } else {
            itarch.item = itarch.itemFromMetadataField(context, itemField);
        }

        if (itarch.item == null) {
            throw new Exception("Item not instantiated: " + itarch.dirname);
        }

        ItemUpdate.prv("item instantiated: " + itarch.item.getHandle());

        return itarch;
    }

    protected static DocumentBuilder getDocumentBuilder()
        throws ParserConfigurationException {
        if (builder == null) {
            builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
        }
        return builder;
    }

    /**
     * Getter for Transformer
     *
     * @return Transformer
     * @throws TransformerConfigurationException if config error
     */
    protected Transformer getTransformer()
        throws TransformerConfigurationException {
        if (transformer == null) {
            transformer = TransformerFactory.newInstance().newTransformer();
        }
        return transformer;
    }

    /**
     * Getter for the DSpace item referenced in the archive
     *
     * @return DSpace item
     */
    public Item getItem() {
        return item;
    }

    /**
     * Getter for directory in archive on disk
     *
     * @return directory in archive
     */
    public File getDirectory() {
        return dir;
    }

    /**
     * Getter for directory name in archive
     *
     * @return directory name in archive
     */
    public String getDirectoryName() {
        return dirname;
    }

    /**
     * Add metadata field to undo list
     *
     * @param dtom DtoMetadata (represents metadata field)
     */
    public void addUndoMetadataField(DtoMetadata dtom) {
        this.undoDtomList.add(dtom);
    }

    /**
     * Getter for list of metadata fields
     *
     * @return list of metadata fields
     */
    public List<DtoMetadata> getMetadataFields() {
        return dtomList;
    }

    /**
     * Add bitstream id to delete contents file
     *
     * @param bitstreamId bitstream ID
     */
    public void addUndoDeleteContents(UUID bitstreamId) {
        this.undoAddContents.add(bitstreamId);
    }

    /**
     * Obtain item from DSpace based on handle
     * This is the default implementation
     * that uses the dc.identifier.uri metadatafield
     * that contains the item handle as its value
     *
     * @param context DSpace Context
     * @throws SQLException if database error
     * @throws Exception    if error
     */
    private Item itemFromHandleInput(Context context)
        throws SQLException, Exception {
        DtoMetadata dtom = getMetadataField("dc.identifier.uri");
        if (dtom == null) {
            throw new Exception("No dc.identier.uri field found for handle");
        }

        this.addUndoMetadataField(dtom);  //seed the undo list with the uri
        String uri = dtom.value;

        if (!uri.startsWith(ItemUpdate.HANDLE_PREFIX)) {
            throw new Exception("dc.identifier.uri for item " + uri
                                    + " does not begin with prefix: " + ItemUpdate.HANDLE_PREFIX);
        }

        String handle = uri.substring(ItemUpdate.HANDLE_PREFIX.length());

        DSpaceObject dso = handleService.resolveToObject(context, handle);
        if (dso instanceof Item) {
            item = (Item) dso;
        } else {
            ItemUpdate.pr("Warning: item not instantiated");
            throw new IllegalArgumentException("Item " + handle + " not instantiated.");
        }
        return item;
    }

    /**
     * Find and instantiate Item from the dublin_core.xml based
     * on the specified itemField for the item identifier,
     *
     * @param context   - the DSpace context
     * @param itemField - the compound form of the metadata element <schema>.<element>.<qualifier>
     * @throws SQLException if database error
     * @throws Exception    if error
     */
    private Item itemFromMetadataField(Context context, String itemField)
        throws SQLException, AuthorizeException, Exception {
        DtoMetadata dtom = getMetadataField(itemField);

        Item item = null;

        if (dtom == null) {
            throw new IllegalArgumentException("No field found for item identifier field: " + itemField);
        }
        ItemUpdate.prv("Metadata field to match for item: " + dtom.toString());

        this.addUndoMetadataField(dtom);  //seed the undo list with the identifier field

        Iterator<Item> itr = itemService
            .findByMetadataField(context, dtom.schema, dtom.element, dtom.qualifier, dtom.value);
        int count = 0;
        while (itr.hasNext()) {
            item = itr.next();
            count++;
        }

        ItemUpdate.prv("items matching = " + count);

        if (count != 1) {
            throw new Exception("" + count + " items matching item identifier: " + dtom.value);
        }

        return item;
    }

    /**
     * Get DtoMetadata field
     *
     * @param compoundForm compound form
     * @return DtoMetadata field
     */
    private DtoMetadata getMetadataField(String compoundForm) {
        for (DtoMetadata dtom : dtomList) {
            if (dtom.matches(compoundForm, false)) {
                return dtom;
            }
        }
        return null;
    }

    /**
     * write undo directory and files to Disk in archive format
     *
     * @param undoDir - the root directory of the undo archive
     * @throws IOException                       if IO error
     * @throws ParserConfigurationException      if config error
     * @throws TransformerConfigurationException if transformer config error
     * @throws TransformerException              if transformer error
     * @throws FileNotFoundException             if file not found
     */
    public void writeUndo(File undoDir)
        throws IOException, ParserConfigurationException, TransformerConfigurationException,
        TransformerException, FileNotFoundException {
        // create directory for item
        File dir = new File(undoDir, dirname);
        if (!dir.exists() && !dir.mkdir()) {
            log.error("Unable to create undo directory");
        }

        OutputStream out = null;

        try {
            out = new FileOutputStream(new File(dir, "dublin_core.xml"));
            Document doc = MetadataUtilities.writeDublinCore(getDocumentBuilder(), undoDtomList);
            MetadataUtilities.writeDocument(doc, getTransformer(), out);

            // if undo has delete bitstream
            if (undoAddContents.size() > 0) {
                PrintWriter pw = null;
                try {
                    File f = new File(dir, ItemUpdate.DELETE_CONTENTS_FILE);
                    pw = new PrintWriter(new BufferedWriter(new FileWriter(f)));
                    for (UUID i : undoAddContents) {
                        pw.println(i);
                    }
                } finally {
                    pw.close();
                }
            }
        } finally {
            if (out != null) {
                out.close();
            }
        }
    }
} //end class
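As the factory Javadoc above notes, ItemArchive.create() expects a per-item directory containing dublin_core.xml whose dc.identifier.uri carries the item handle. A hedged sketch of loading one such directory follows; the path and helper class name are placeholders, and the caller must supply a real Context.

package org.dspace.app.itemupdate;

import java.io.File;

import org.dspace.core.Context;

// Hedged sketch: load one item directory from a source archive; the path is a placeholder.
public class ItemArchiveLoadSketch {
    public static void run(Context context) throws Exception {
        File itemDir = new File("/path/to/archive/item_000");
        // null itemField => resolve the item from the handle in dc.identifier.uri
        ItemArchive itarch = ItemArchive.create(context, itemDir, null);
        System.out.println("Loaded " + itarch.getMetadataFields().size()
                               + " metadata fields for " + itarch.getDirectoryName());
    }
}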
View File
@@ -7,48 +7,63 @@
 */
package org.dspace.app.itemupdate;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;

/**
 * Provides some batch editing capabilities for items in DSpace:
 * Metadata fields - Add, Delete
 * Bitstreams - Add, Delete
 *
 * The design has been for compatibility with ItemImporter
 * in the use of the DSpace archive format which is used to
 * specify changes on a per item basis. The directory names
 * to correspond to each item are arbitrary and will only be
 * used for logging purposes. The reference to the item is
 * from a required dc.identifier with the item handle to be
 * included in the dublin_core.xml (or similar metadata) file.
 *
 * Any combination of these actions is permitted in a single run of this class
 * The order of actions is important when used in combination.
 * It is the responsibility of the calling class (here, ItemUpdate)
 * to register UpdateAction classes in the order to which they are
 * to be performed.
 *
 * It is unfortunate that so much code needs to be borrowed
 * from ItemImport as it is not reusable in private methods, etc.
 * Some of this has been placed into the MetadataUtilities class
 * for possible reuse elsewhere.
 *
 * @author W. Hays based on a conceptual design by R. Rodgers
 */
public class ItemUpdate {
@@ -64,32 +79,29 @@ public class ItemUpdate {
    protected static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService();
    protected static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
    protected static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();

    static {
        filterAliases.put("ORIGINAL", "org.dspace.app.itemupdate.OriginalBitstreamFilter");
        filterAliases
            .put("ORIGINAL_AND_DERIVATIVES", "org.dspace.app.itemupdate.OriginalWithDerivativesBitstreamFilter");
        filterAliases.put("TEXT", "org.dspace.app.itemupdate.DerivativeTextBitstreamFilter");
        filterAliases.put("THUMBNAIL", "org.dspace.app.itemupdate.ThumbnailBitstreamFilter");
    }

    // File listing filter to check for folders
    static FilenameFilter directoryFilter = new FilenameFilter() {
        @Override
        public boolean accept(File dir, String n) {
            File f = new File(dir.getAbsolutePath() + File.separatorChar + n);
            return f.isDirectory();
        }
    };

    // File listing filter to check for files (not directories)
    static FilenameFilter fileFilter = new FilenameFilter() {
        @Override
        public boolean accept(File dir, String n) {
            File f = new File(dir.getAbsolutePath() + File.separatorChar + n);
            return (f.isFile());
        }
@@ -101,11 +113,9 @@ public class ItemUpdate {
protected String eperson; protected String eperson;
/** /**
*
* @param argv the command line arguments given * @param argv the command line arguments given
*/ */
public static void main(String[] argv) public static void main(String[] argv) {
{
// create an options object and populate it // create an options object and populate it
CommandLineParser parser = new PosixParser(); CommandLineParser parser = new PosixParser();
@@ -116,20 +126,23 @@ public class ItemUpdate {
options.addOption("s", "source", true, "root directory of source dspace archive "); options.addOption("s", "source", true, "root directory of source dspace archive ");
//actions on items //actions on items
        options.addOption("a", "addmetadata", true,
                          "add metadata specified for each item; multiples separated by semicolon ';'");
options.addOption("d", "deletemetadata", true, "delete metadata specified for each item"); options.addOption("d", "deletemetadata", true, "delete metadata specified for each item");
options.addOption("A", "addbitstreams", false, "add bitstreams as specified for each item"); options.addOption("A", "addbitstreams", false, "add bitstreams as specified for each item");
// extra work to get optional argument // extra work to get optional argument
        Option delBitstreamOption = new Option("D", "deletebitstreams", true,
                                               "delete bitstreams as specified for each item");
delBitstreamOption.setOptionalArg(true); delBitstreamOption.setOptionalArg(true);
delBitstreamOption.setArgName("BitstreamFilter"); delBitstreamOption.setArgName("BitstreamFilter");
options.addOption(delBitstreamOption); options.addOption(delBitstreamOption);
//other params //other params
options.addOption("e", "eperson", true, "email of eperson doing the update"); options.addOption("e", "eperson", true, "email of eperson doing the update");
        options.addOption("i", "itemfield", true,
                          "optional metadata field that contains the item identifier; default is dc.identifier.uri");
options.addOption("F", "filter-properties", true, "filter class name; only for deleting bitstream"); options.addOption("F", "filter-properties", true, "filter class name; only for deleting bitstream");
options.addOption("v", "verbose", false, "verbose logging"); options.addOption("v", "verbose", false, "verbose logging");
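The class Javadoc notes that any combination of these actions may be used in one run. As a rough, hedged illustration only (the e-person address, archive path and metadata field below are invented), the same entry point could be driven programmatically:

```java
// Sketch only: equivalent to `dsrun org.dspace.app.itemupdate.ItemUpdate ...` on a deployed DSpace.
// All argument values are hypothetical.
public class ItemUpdateInvocationSketch {
    public static void main(String[] args) {
        ItemUpdate.main(new String[] {
            "-e", "admin@example.edu",            // eperson doing the update
            "-s", "/dspace/archives/update001",   // root directory of the source archive
            "-a", "dc.description.abstract",      // add the metadata given in each dublin_core.xml
            "-t"                                  // test run: report what would change, change nothing
        });
    }
}
```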
@@ -147,12 +160,10 @@ public class ItemUpdate {
Context context = null; Context context = null;
ItemUpdate iu = new ItemUpdate(); ItemUpdate iu = new ItemUpdate();
try try {
{
CommandLine line = parser.parse(options, argv); CommandLine line = parser.parse(options, argv);
if (line.hasOption('h')) if (line.hasOption('h')) {
{
HelpFormatter myhelp = new HelpFormatter(); HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("ItemUpdate", options); myhelp.printHelp("ItemUpdate", options);
pr(""); pr("");
@@ -166,91 +177,79 @@ public class ItemUpdate {
System.exit(0); System.exit(0);
} }
if (line.hasOption('v')) if (line.hasOption('v')) {
{
verbose = true; verbose = true;
} }
if (line.hasOption('P')) if (line.hasOption('P')) {
{
alterProvenance = false; alterProvenance = false;
pr("Suppressing changes to Provenance field option"); pr("Suppressing changes to Provenance field option");
} }
iu.eperson = line.getOptionValue('e'); // db ID or email iu.eperson = line.getOptionValue('e'); // db ID or email
if (!line.hasOption('s')) // item specific changes from archive dir if (!line.hasOption('s')) { // item specific changes from archive dir
{
pr("Missing source archive option"); pr("Missing source archive option");
System.exit(1); System.exit(1);
} }
String sourcedir = line.getOptionValue('s'); String sourcedir = line.getOptionValue('s');
if (line.hasOption('t')) //test if (line.hasOption('t')) { //test
{
isTest = true; isTest = true;
pr("**Test Run** - not actually updating items."); pr("**Test Run** - not actually updating items.");
} }
if (line.hasOption('i')) if (line.hasOption('i')) {
{
itemField = line.getOptionValue('i'); itemField = line.getOptionValue('i');
} }
if (line.hasOption('d')) if (line.hasOption('d')) {
{
String[] targetFields = line.getOptionValues('d'); String[] targetFields = line.getOptionValues('d');
DeleteMetadataAction delMetadataAction = (DeleteMetadataAction) iu.actionMgr.getUpdateAction(DeleteMetadataAction.class); DeleteMetadataAction delMetadataAction = (DeleteMetadataAction) iu.actionMgr
.getUpdateAction(DeleteMetadataAction.class);
delMetadataAction.addTargetFields(targetFields); delMetadataAction.addTargetFields(targetFields);
//undo is an add //undo is an add
for (String field : targetFields) for (String field : targetFields) {
{
iu.undoActionList.add(" -a " + field + " "); iu.undoActionList.add(" -a " + field + " ");
} }
pr("Delete metadata for fields: "); pr("Delete metadata for fields: ");
for (String s : targetFields) for (String s : targetFields) {
{
pr(" " + s); pr(" " + s);
} }
} }
if (line.hasOption('a')) if (line.hasOption('a')) {
{
String[] targetFields = line.getOptionValues('a'); String[] targetFields = line.getOptionValues('a');
AddMetadataAction addMetadataAction = (AddMetadataAction) iu.actionMgr.getUpdateAction(AddMetadataAction.class); AddMetadataAction addMetadataAction = (AddMetadataAction) iu.actionMgr
.getUpdateAction(AddMetadataAction.class);
addMetadataAction.addTargetFields(targetFields); addMetadataAction.addTargetFields(targetFields);
//undo is a delete followed by an add of a replace record for target fields //undo is a delete followed by an add of a replace record for target fields
for (String field : targetFields) for (String field : targetFields) {
{
iu.undoActionList.add(" -d " + field + " "); iu.undoActionList.add(" -d " + field + " ");
} }
for (String field : targetFields) for (String field : targetFields) {
{
iu.undoActionList.add(" -a " + field + " "); iu.undoActionList.add(" -a " + field + " ");
} }
pr("Add metadata for fields: "); pr("Add metadata for fields: ");
for (String s : targetFields) for (String s : targetFields) {
{
pr(" " + s); pr(" " + s);
} }
} }
if (line.hasOption('D')) // undo not supported if (line.hasOption('D')) { // undo not supported
{
pr("Delete bitstreams "); pr("Delete bitstreams ");
String[] filterNames = line.getOptionValues('D'); String[] filterNames = line.getOptionValues('D');
if ((filterNames != null) && (filterNames.length > 1)) if ((filterNames != null) && (filterNames.length > 1)) {
{
pr("Error: Only one filter can be a used at a time."); pr("Error: Only one filter can be a used at a time.");
System.exit(1); System.exit(1);
} }
@@ -258,84 +257,71 @@ public class ItemUpdate {
String filterName = line.getOptionValue('D'); String filterName = line.getOptionValue('D');
pr("Filter argument: " + filterName); pr("Filter argument: " + filterName);
                if (filterName == null) { // indicates using delete_contents files
                    DeleteBitstreamsAction delAction = (DeleteBitstreamsAction) iu.actionMgr
                        .getUpdateAction(DeleteBitstreamsAction.class);
                    delAction.setAlterProvenance(alterProvenance);
                } else {
// check if param is on ALIAS list // check if param is on ALIAS list
String filterClassname = filterAliases.get(filterName); String filterClassname = filterAliases.get(filterName);
if (filterClassname == null) if (filterClassname == null) {
{
filterClassname = filterName; filterClassname = filterName;
} }
BitstreamFilter filter = null; BitstreamFilter filter = null;
try try {
{
Class<?> cfilter = Class.forName(filterClassname); Class<?> cfilter = Class.forName(filterClassname);
pr("BitstreamFilter class to instantiate: " + cfilter.toString()); pr("BitstreamFilter class to instantiate: " + cfilter.toString());
filter = (BitstreamFilter) cfilter.newInstance(); //unfortunate cast, an erasure consequence filter = (BitstreamFilter) cfilter.newInstance(); //unfortunate cast, an erasure consequence
} } catch (Exception e) {
catch(Exception e)
{
pr("Error: Failure instantiating bitstream filter class: " + filterClassname); pr("Error: Failure instantiating bitstream filter class: " + filterClassname);
System.exit(1); System.exit(1);
} }
String filterPropertiesName = line.getOptionValue('F'); String filterPropertiesName = line.getOptionValue('F');
                    if (filterPropertiesName != null) { //not always required
                        try {
                            // TODO try multiple relative locations, e.g. source dir
                            if (!filterPropertiesName.startsWith("/")) {
                                filterPropertiesName = sourcedir + File.separator + filterPropertiesName;
                            }
                            filter.initProperties(filterPropertiesName);
                        } catch (Exception e) {
                            pr("Error: Failure finding properties file for bitstream filter class: " +
                                   filterPropertiesName);
                            System.exit(1);
                        }
                    }
DeleteBitstreamsByFilterAction delAction = DeleteBitstreamsByFilterAction delAction =
(DeleteBitstreamsByFilterAction) iu.actionMgr.getUpdateAction(DeleteBitstreamsByFilterAction.class); (DeleteBitstreamsByFilterAction) iu.actionMgr
.getUpdateAction(DeleteBitstreamsByFilterAction.class);
delAction.setAlterProvenance(alterProvenance); delAction.setAlterProvenance(alterProvenance);
delAction.setBitstreamFilter(filter); delAction.setBitstreamFilter(filter);
//undo not supported //undo not supported
} }
} }
if (line.hasOption('A')) if (line.hasOption('A')) {
{
pr("Add bitstreams "); pr("Add bitstreams ");
AddBitstreamsAction addAction = (AddBitstreamsAction) iu.actionMgr.getUpdateAction(AddBitstreamsAction.class); AddBitstreamsAction addAction = (AddBitstreamsAction) iu.actionMgr
.getUpdateAction(AddBitstreamsAction.class);
addAction.setAlterProvenance(alterProvenance); addAction.setAlterProvenance(alterProvenance);
iu.undoActionList.add(" -D "); // delete_contents file will be written, no arg required iu.undoActionList.add(" -D "); // delete_contents file will be written, no arg required
} }
if (!iu.actionMgr.hasActions()) if (!iu.actionMgr.hasActions()) {
{
pr("Error - an action must be specified"); pr("Error - an action must be specified");
System.exit(1); System.exit(1);
} } else {
else
{
pr("Actions to be performed: "); pr("Actions to be performed: ");
for (UpdateAction ua : iu.actionMgr) for (UpdateAction ua : iu.actionMgr) {
{
pr(" " + ua.getClass().getName()); pr(" " + ua.getClass().getName());
} }
} }
@@ -346,36 +332,25 @@ public class ItemUpdate {
iu.setEPerson(context, iu.eperson); iu.setEPerson(context, iu.eperson);
context.turnOffAuthorisationSystem(); context.turnOffAuthorisationSystem();
HANDLE_PREFIX = ConfigurationManager.getProperty("handle.canonical.prefix"); HANDLE_PREFIX = handleService.getCanonicalPrefix();
if (HANDLE_PREFIX == null || HANDLE_PREFIX.length() == 0)
{
HANDLE_PREFIX = "http://hdl.handle.net/";
}
iu.processArchive(context, sourcedir, itemField, metadataIndexName, alterProvenance, isTest); iu.processArchive(context, sourcedir, itemField, metadataIndexName, alterProvenance, isTest);
context.complete(); // complete all transactions context.complete(); // complete all transactions
} } catch (Exception e) {
catch (Exception e) if (context != null && context.isValid()) {
{
if (context != null && context.isValid())
{
context.abort(); context.abort();
} }
e.printStackTrace(); e.printStackTrace();
pr(e.toString()); pr(e.toString());
status = 1; status = 1;
} } finally {
finally {
context.restoreAuthSystemState(); context.restoreAuthSystemState();
} }
if (isTest) if (isTest) {
{
pr("***End of Test Run***"); pr("***End of Test Run***");
} } else {
else
{
pr("End."); pr("End.");
} }
@@ -385,23 +360,21 @@ public class ItemUpdate {
/** /**
* process an archive * process an archive
* *
* @param context DSpace Context * @param context DSpace Context
* @param sourceDirPath source path * @param sourceDirPath source path
* @param itemField item field * @param itemField item field
* @param metadataIndexName index name * @param metadataIndexName index name
* @param alterProvenance whether to alter provenance * @param alterProvenance whether to alter provenance
* @param isTest test flag * @param isTest test flag
* @throws Exception if error * @throws Exception if error
*/ */
protected void processArchive(Context context, String sourceDirPath, String itemField, protected void processArchive(Context context, String sourceDirPath, String itemField,
String metadataIndexName, boolean alterProvenance, boolean isTest) String metadataIndexName, boolean alterProvenance, boolean isTest)
throws Exception throws Exception {
{
// open and process the source directory // open and process the source directory
File sourceDir = new File(sourceDirPath); File sourceDir = new File(sourceDirPath);
if ((sourceDir == null) || !sourceDir.exists() || !sourceDir.isDirectory()) if ((sourceDir == null) || !sourceDir.exists() || !sourceDir.isDirectory()) {
{
pr("Error, cannot open archive source directory " + sourceDirPath); pr("Error, cannot open archive source directory " + sourceDirPath);
throw new Exception("error with archive source directory " + sourceDirPath); throw new Exception("error with archive source directory " + sourceDirPath);
} }
@@ -412,89 +385,73 @@ public class ItemUpdate {
//Undo is suppressed to prevent undo of undo //Undo is suppressed to prevent undo of undo
boolean suppressUndo = false; boolean suppressUndo = false;
File fSuppressUndo = new File(sourceDir, SUPPRESS_UNDO_FILENAME); File fSuppressUndo = new File(sourceDir, SUPPRESS_UNDO_FILENAME);
if (fSuppressUndo.exists()) if (fSuppressUndo.exists()) {
{
suppressUndo = true; suppressUndo = true;
} }
File undoDir = null; //sibling directory of source archive File undoDir = null; //sibling directory of source archive
if (!suppressUndo && !isTest) if (!suppressUndo && !isTest) {
{
undoDir = initUndoArchive(sourceDir); undoDir = initUndoArchive(sourceDir);
} }
int itemCount = 0; int itemCount = 0;
int successItemCount = 0; int successItemCount = 0;
for (String dirname : dircontents) for (String dirname : dircontents) {
{
itemCount++; itemCount++;
pr(""); pr("");
pr("processing item " + dirname); pr("processing item " + dirname);
try try {
{
ItemArchive itarch = ItemArchive.create(context, new File(sourceDir, dirname), itemField); ItemArchive itarch = ItemArchive.create(context, new File(sourceDir, dirname), itemField);
for (UpdateAction action : actionMgr) for (UpdateAction action : actionMgr) {
{
pr("action: " + action.getClass().getName()); pr("action: " + action.getClass().getName());
action.execute(context, itarch, isTest, suppressUndo); action.execute(context, itarch, isTest, suppressUndo);
if (!isTest && !suppressUndo) if (!isTest && !suppressUndo) {
{
itarch.writeUndo(undoDir); itarch.writeUndo(undoDir);
} }
} }
                if (!isTest) {
                    Item item = itarch.getItem();
                    itemService.update(context, item);  //need to update before commit
                    context.uncacheEntity(item);
                }
                ItemUpdate.pr("Item " + dirname + " completed");
                successItemCount++;
            } catch (Exception e) {
                pr("Exception processing item " + dirname + ": " + e.toString());
                e.printStackTrace();
            }
} }
if (!suppressUndo && !isTest) if (!suppressUndo && !isTest) {
{
StringBuilder sb = new StringBuilder("dsrun org.dspace.app.itemupdate.ItemUpdate "); StringBuilder sb = new StringBuilder("dsrun org.dspace.app.itemupdate.ItemUpdate ");
sb.append(" -e ").append(this.eperson); sb.append(" -e ").append(this.eperson);
sb.append(" -s ").append(undoDir); sb.append(" -s ").append(undoDir);
if (itemField != null) if (itemField != null) {
{
sb.append(" -i ").append(itemField); sb.append(" -i ").append(itemField);
} }
if (!alterProvenance) if (!alterProvenance) {
{
sb.append(" -P "); sb.append(" -P ");
} }
if (isTest) if (isTest) {
{
sb.append(" -t "); sb.append(" -t ");
} }
for (String actionOption : undoActionList) for (String actionOption : undoActionList) {
{
sb.append(actionOption); sb.append(actionOption);
} }
            PrintWriter pw = null;
            try {
                File cmdFile = new File(undoDir.getParent(), undoDir.getName() + "_command.sh");
                pw = new PrintWriter(new BufferedWriter(new FileWriter(cmdFile)));
                pw.println(sb.toString());
            } finally {
                pw.close();
            }
} }
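To make the undo bookkeeping above concrete, here is a hedged sketch of the command line it assembles for a hypothetical add-metadata run; the e-person and paths are invented, the real values come from the current run:

```java
public class UndoCommandSketch {
    public static void main(String[] args) {
        // Mirrors the StringBuilder assembly in processArchive; values are hypothetical.
        StringBuilder sb = new StringBuilder("dsrun org.dspace.app.itemupdate.ItemUpdate ");
        sb.append(" -e ").append("admin@example.edu");
        sb.append(" -s ").append("/dspace/archives/undo_update001_1");
        sb.append(" -d dc.description.abstract ");   // undo of the earlier '-a': delete the field ...
        sb.append(" -a dc.description.abstract ");   // ... then add back the recorded replace records
        // This single line is what gets written to undo_update001_1_command.sh
        System.out.println(sb.toString());
    }
}
```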
@@ -512,41 +469,36 @@ public class ItemUpdate {
* @param sourceDir - the original source directory * @param sourceDir - the original source directory
* @return the directory of the undo archive * @return the directory of the undo archive
* @throws FileNotFoundException if file doesn't exist * @throws FileNotFoundException if file doesn't exist
* @throws IOException if IO error * @throws IOException if IO error
*/ */
protected File initUndoArchive(File sourceDir) protected File initUndoArchive(File sourceDir)
throws FileNotFoundException, IOException throws FileNotFoundException, IOException {
{
File parentDir = sourceDir.getCanonicalFile().getParentFile(); File parentDir = sourceDir.getCanonicalFile().getParentFile();
if (parentDir == null) if (parentDir == null) {
{ throw new FileNotFoundException(
throw new FileNotFoundException("Parent directory of archive directory not found; unable to write UndoArchive; no processing performed"); "Parent directory of archive directory not found; unable to write UndoArchive; no processing " +
"performed");
} }
String sourceDirName = sourceDir.getName(); String sourceDirName = sourceDir.getName();
int seqNo = 1; int seqNo = 1;
File undoDir = new File(parentDir, "undo_" + sourceDirName + "_" + seqNo); File undoDir = new File(parentDir, "undo_" + sourceDirName + "_" + seqNo);
while (undoDir.exists()) while (undoDir.exists()) {
{ undoDir = new File(parentDir, "undo_" + sourceDirName + "_" + ++seqNo); //increment
undoDir = new File(parentDir, "undo_" + sourceDirName+ "_" + ++seqNo); //increment
} }
// create root directory // create root directory
if (!undoDir.mkdir()) if (!undoDir.mkdir()) {
{
pr("ERROR creating Undo Archive directory " + undoDir.getCanonicalPath()); pr("ERROR creating Undo Archive directory " + undoDir.getCanonicalPath());
throw new IOException("ERROR creating Undo Archive directory " + undoDir.getCanonicalPath()); throw new IOException("ERROR creating Undo Archive directory " + undoDir.getCanonicalPath());
} }
//Undo is suppressed to prevent undo of undo //Undo is suppressed to prevent undo of undo
File fSuppressUndo = new File(undoDir, ItemUpdate.SUPPRESS_UNDO_FILENAME); File fSuppressUndo = new File(undoDir, ItemUpdate.SUPPRESS_UNDO_FILENAME);
try try {
{
fSuppressUndo.createNewFile(); fSuppressUndo.createNewFile();
} } catch (IOException e) {
catch(IOException e)
{
pr("ERROR creating Suppress Undo File " + e.toString()); pr("ERROR creating Suppress Undo File " + e.toString());
throw e; throw e;
} }
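The undo archive is created as a sibling of the source directory with a unique numeric suffix. A small standalone sketch of that naming rule (the parent path and archive name are invented):

```java
import java.io.File;

public class UndoDirNamingSketch {
    public static void main(String[] args) {
        File parentDir = new File("/dspace/archives");   // hypothetical parent of the source archive
        String sourceDirName = "update001";              // hypothetical source archive name

        // Increment the suffix until an unused sibling name is found, as initUndoArchive does above.
        int seqNo = 1;
        File undoDir = new File(parentDir, "undo_" + sourceDirName + "_" + seqNo);
        while (undoDir.exists()) {
            undoDir = new File(parentDir, "undo_" + sourceDirName + "_" + ++seqNo);
        }
        System.out.println("undo archive would be created at " + undoDir.getPath());
    }
}
```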
@@ -557,33 +509,29 @@ public class ItemUpdate {
/** /**
* Set EPerson doing import * Set EPerson doing import
*
* @param context DSpace Context * @param context DSpace Context
* @param eperson EPerson obj * @param eperson EPerson obj
* @throws Exception if error * @throws Exception if error
*/ */
protected void setEPerson(Context context, String eperson) protected void setEPerson(Context context, String eperson)
throws Exception throws Exception {
{ if (eperson == null) {
if (eperson == null)
{
pr("Error - an eperson to do the importing must be specified"); pr("Error - an eperson to do the importing must be specified");
pr(" (run with -h flag for details)"); pr(" (run with -h flag for details)");
throw new Exception("EPerson not specified."); } throw new Exception("EPerson not specified.");
}
EPerson myEPerson = null; EPerson myEPerson = null;
if (eperson.indexOf('@') != -1) if (eperson.indexOf('@') != -1) {
{
// @ sign, must be an email // @ sign, must be an email
myEPerson = epersonService.findByEmail(context, eperson); myEPerson = epersonService.findByEmail(context, eperson);
} } else {
else
{
myEPerson = epersonService.find(context, UUID.fromString(eperson)); myEPerson = epersonService.find(context, UUID.fromString(eperson));
} }
if (myEPerson == null) if (myEPerson == null) {
{
pr("Error, eperson cannot be found: " + eperson); pr("Error, eperson cannot be found: " + eperson);
throw new Exception("Invalid EPerson"); throw new Exception("Invalid EPerson");
} }
@@ -598,19 +546,17 @@ public class ItemUpdate {
* *
* @param s String * @param s String
*/ */
static void pr(String s) static void pr(String s) {
{
System.out.println(s); System.out.println(s);
} }
/** /**
* print if verbose flag is set * print if verbose flag is set
*
* @param s String * @param s String
*/ */
static void prv(String s) static void prv(String s) {
{ if (verbose) {
if (verbose)
{
System.out.println(s); System.out.println(s);
} }
} }

View File

@@ -11,14 +11,13 @@ import java.io.BufferedReader;
import java.io.File; import java.io.File;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.io.FileReader; import java.io.FileReader;
import java.io.InputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream; import java.io.OutputStream;
import java.sql.SQLException; import java.sql.SQLException;
import java.text.ParseException; import java.text.ParseException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Result; import javax.xml.transform.Result;
@@ -29,12 +28,17 @@ import javax.xml.transform.TransformerException;
import javax.xml.transform.dom.DOMSource; import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult; import javax.xml.transform.stream.StreamResult;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.xpath.XPathAPI; import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.*; import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.w3c.dom.Document; import org.w3c.dom.Document;
import org.w3c.dom.Element; import org.w3c.dom.Element;
@@ -43,311 +47,265 @@ import org.w3c.dom.Node;
import org.w3c.dom.NodeList; import org.w3c.dom.NodeList;
import org.xml.sax.SAXException; import org.xml.sax.SAXException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.ConfigurationManager;
/**
 * Miscellaneous methods for metadata handling that build on the API
 * and which might have general utility outside of their specific use
 * in ItemUpdate.
 *
 * The XML methods were based on those in ItemImport.
 */
public class MetadataUtilities {
    protected static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();

    /**
     * Default constructor
     */
    private MetadataUtilities() { }

    /**
     * Working around Item API to delete a value-specific Metadatum
     * For a given element/qualifier/lang:
     * get all DCValues
     * clear (i.e. delete) all of these DCValues
     * add them back, minus the one to actually delete
     *
     * @param context          DSpace Context
     * @param item             Item Object
     * @param dtom             metadata field
     * @param isLanguageStrict whether strict or not
     * @return true if metadata field is found with matching value and was deleted
     * @throws SQLException if database error
     */
    public static boolean deleteMetadataByValue(Context context, Item item, DtoMetadata dtom, boolean isLanguageStrict)
        throws SQLException {
        List<MetadataValue> ar = null;

        if (isLanguageStrict) { // get all for given type
            ar = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, dtom.language);
        } else {
            ar = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
        }

        boolean found = false;

        //build new set minus the one to delete
        List<String> vals = new ArrayList<String>();
        for (MetadataValue dcv : ar) {
            if (dcv.getValue().equals(dtom.value)) {
                found = true;
            } else {
                vals.add(dcv.getValue());
            }
        }

        if (found) { //remove all for given type ??synchronize this block??
            if (isLanguageStrict) {
                itemService.clearMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language);
            } else {
                itemService.clearMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
            }
            itemService.addMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language, vals);
        }
        return found;
    }
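A hedged usage sketch of the workaround described above; the Context and Item are assumed to come from the caller, the field value is invented, and DtoMetadata.create is used the same way loadDublinCore uses it below:

```java
// Sketch only: assumes an open DSpace Context and an Item loaded elsewhere.
static boolean removeOneSubject(Context context, Item item) throws Exception {
    DtoMetadata dtom = DtoMetadata.create("dc", "subject", null, "en", "Paleontology"); // hypothetical value
    // Non-strict language match: every dc.subject value is cleared, then all values
    // except the matching one are added back.
    return MetadataUtilities.deleteMetadataByValue(context, item, dtom, false);
}
```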
    /**
     * Append text to value metadata field to item
     *
     * @param context          DSpace Context
     * @param item             DSpace Item
     * @param dtom             metadata field
     * @param isLanguageStrict if strict
     * @param textToAppend     text to append
     * @throws IllegalArgumentException - When target metadata field is not found
     * @throws SQLException             if database error
     */
    public static void appendMetadata(Context context, Item item, DtoMetadata dtom, boolean isLanguageStrict,
                                      String textToAppend)
        throws IllegalArgumentException, SQLException {
        List<MetadataValue> ar = null;

        // get all values for given element/qualifier
        if (isLanguageStrict) { // get all for given element/qualifier
            ar = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, dtom.language);
        } else {
            ar = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
        }

        if (ar.size() == 0) {
            throw new IllegalArgumentException("Metadata to append to not found");
        }

        int idx = 0; //index of field to change
        if (ar.size() > 1) { //need to pick one, can't be sure it's the last one
            // TODO maybe get highest id ?
        }

        //build new set minus the one to delete
        List<String> vals = new ArrayList<String>();
        for (int i = 0; i < ar.size(); i++) {
            if (i == idx) {
                vals.add(ar.get(i).getValue() + textToAppend);
            } else {
                vals.add(ar.get(i).getValue());
            }
        }

        if (isLanguageStrict) {
            itemService.clearMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language);
        } else {
            itemService.clearMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
        }
        itemService.addMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language, vals);
    }
/** /**
* Modification of method from ItemImporter.loadDublinCore * Modification of method from ItemImporter.loadDublinCore
* as a Factory method * as a Factory method
*
* @param docBuilder DocumentBuilder
* @param is - InputStream of dublin_core.xml
* @return list of DtoMetadata representing the metadata fields relating to an Item
* @throws SQLException if database error
* @throws IOException if IO error
* @throws ParserConfigurationException if parser config error
* @throws SAXException if XML error
* @throws TransformerException if transformer error
* @throws AuthorizeException if authorization error
*/
public static List<DtoMetadata> loadDublinCore(DocumentBuilder docBuilder, InputStream is)
throws SQLException, IOException, ParserConfigurationException,
SAXException, TransformerException, AuthorizeException
{
Document document = docBuilder.parse(is);
List<DtoMetadata> dtomList = new ArrayList<DtoMetadata>();
// Get the schema, for backward compatibility we will default to the
// dublin core schema if the schema name is not available in the import file
String schema = null;
NodeList metadata = XPathAPI.selectNodeList(document, "/dublin_core");
Node schemaAttr = metadata.item(0).getAttributes().getNamedItem("schema");
if (schemaAttr == null)
{
schema = MetadataSchema.DC_SCHEMA;
}
else
{
schema = schemaAttr.getNodeValue();
}
// Get the nodes corresponding to formats
NodeList dcNodes = XPathAPI.selectNodeList(document, "/dublin_core/dcvalue");
for (int i = 0; i < dcNodes.getLength(); i++)
{
Node n = dcNodes.item(i);
String value = getStringValue(n).trim();
// compensate for empty value getting read as "null", which won't display
if (value == null)
{
value = "";
}
String element = getAttributeValue(n, "element");
if (element != null)
{
element = element.trim();
}
String qualifier = getAttributeValue(n, "qualifier");
if (qualifier != null)
{
qualifier = qualifier.trim();
}
String language = getAttributeValue(n, "language");
if (language != null)
{
language = language.trim();
}
if ("none".equals(qualifier) || "".equals(qualifier))
{
qualifier = null;
}
// a goofy default, but consistent with DSpace treatment elsewhere
if (language == null)
{
language = "en";
}
else if ("".equals(language))
{
language = ConfigurationManager.getProperty("default.language");
}
DtoMetadata dtom = DtoMetadata.create(schema, element, qualifier, language, value);
ItemUpdate.pr(dtom.toString());
dtomList.add(dtom);
}
return dtomList;
}
    /**
     * Modification of method from ItemImporter.loadDublinCore
     * as a Factory method
     *
     * @param docBuilder DocumentBuilder
     * @param is         - InputStream of dublin_core.xml
     * @return list of DtoMetadata representing the metadata fields relating to an Item
     * @throws SQLException                 if database error
     * @throws IOException                  if IO error
     * @throws ParserConfigurationException if parser config error
     * @throws SAXException                 if XML error
     * @throws TransformerException         if transformer error
     * @throws AuthorizeException           if authorization error
     */
    public static List<DtoMetadata> loadDublinCore(DocumentBuilder docBuilder, InputStream is)
        throws SQLException, IOException, ParserConfigurationException,
        SAXException, TransformerException, AuthorizeException {
Document document = docBuilder.parse(is);
List<DtoMetadata> dtomList = new ArrayList<DtoMetadata>();
// Get the schema, for backward compatibility we will default to the
// dublin core schema if the schema name is not available in the import file
String schema = null;
NodeList metadata = XPathAPI.selectNodeList(document, "/dublin_core");
Node schemaAttr = metadata.item(0).getAttributes().getNamedItem("schema");
if (schemaAttr == null) {
schema = MetadataSchemaEnum.DC.getName();
} else {
schema = schemaAttr.getNodeValue();
}
// Get the nodes corresponding to formats
NodeList dcNodes = XPathAPI.selectNodeList(document, "/dublin_core/dcvalue");
for (int i = 0; i < dcNodes.getLength(); i++) {
Node n = dcNodes.item(i);
String value = getStringValue(n).trim();
// compensate for empty value getting read as "null", which won't display
if (value == null) {
value = "";
}
String element = getAttributeValue(n, "element");
if (element != null) {
element = element.trim();
}
String qualifier = getAttributeValue(n, "qualifier");
if (qualifier != null) {
qualifier = qualifier.trim();
}
String language = getAttributeValue(n, "language");
if (language != null) {
language = language.trim();
}
if ("none".equals(qualifier) || "".equals(qualifier)) {
qualifier = null;
}
// a goofy default, but consistent with DSpace treatment elsewhere
if (language == null) {
language = "en";
} else if ("".equals(language)) {
language = ConfigurationManager.getProperty("default.language");
}
DtoMetadata dtom = DtoMetadata.create(schema, element, qualifier, language, value);
ItemUpdate.pr(dtom.toString());
dtomList.add(dtom);
}
return dtomList;
}
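A hedged sketch of feeding this factory method a minimal in-memory dublin_core.xml. The XML content and builder setup are illustrative; a real file comes from each item directory in the archive, and the sketch assumes access to the org.dspace.app.itemupdate classes:

```java
import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;

public class LoadDublinCoreSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical document; real input is the dublin_core.xml of an item directory.
        String xml = "<dublin_core schema=\"dc\">\n"
            + "  <dcvalue element=\"title\" qualifier=\"none\">A sample title</dcvalue>\n"
            + "  <dcvalue element=\"identifier\" qualifier=\"uri\">http://hdl.handle.net/123456789/1</dcvalue>\n"
            + "</dublin_core>";

        DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
        List<DtoMetadata> dtoms = MetadataUtilities.loadDublinCore(
            builder, new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)));
        for (DtoMetadata dtom : dtoms) {
            System.out.println(dtom);   // one entry per dcvalue node
        }
    }
}
```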
/**
* Write dublin_core.xml
*
* @param docBuilder DocumentBuilder
* @param dtomList List of metadata fields
* @return xml document
* @throws ParserConfigurationException if parser config error
* @throws TransformerConfigurationException if transformer config error
* @throws TransformerException if transformer error
*/
public static Document writeDublinCore(DocumentBuilder docBuilder, List<DtoMetadata> dtomList)
throws ParserConfigurationException, TransformerConfigurationException, TransformerException {
Document doc = docBuilder.newDocument(); Document doc = docBuilder.newDocument();
Element root = doc.createElement("dublin_core"); Element root = doc.createElement("dublin_core");
doc.appendChild(root); doc.appendChild(root);
for (DtoMetadata dtom : dtomList) for (DtoMetadata dtom : dtomList) {
{ Element mel = doc.createElement("dcvalue");
Element mel = doc.createElement("dcvalue"); mel.setAttribute("element", dtom.element);
mel.setAttribute("element", dtom.element); if (dtom.qualifier == null) {
if (dtom.qualifier == null) mel.setAttribute("qualifier", "none");
{ } else {
mel.setAttribute("qualifier", "none"); mel.setAttribute("qualifier", dtom.qualifier);
} }
else
{
mel.setAttribute("qualifier", dtom.qualifier);
}
if (StringUtils.isEmpty(dtom.language)) if (StringUtils.isEmpty(dtom.language)) {
{ mel.setAttribute("language", "en");
mel.setAttribute("language", "en"); } else {
} mel.setAttribute("language", dtom.language);
else }
{ mel.setTextContent(dtom.value);
mel.setAttribute("language", dtom.language); root.appendChild(mel);
}
mel.setTextContent(dtom.value);
root.appendChild(mel);
} }
return doc; return doc;
} }
/** /**
* write xml document to output stream * write xml document to output stream
* @param doc XML Document *
* @param doc XML Document
* @param transformer XML Transformer * @param transformer XML Transformer
* @param out OutputStream * @param out OutputStream
* @throws IOException if IO Error * @throws IOException if IO Error
* @throws TransformerException if Transformer error * @throws TransformerException if Transformer error
*/ */
public static void writeDocument(Document doc, Transformer transformer, OutputStream out) public static void writeDocument(Document doc, Transformer transformer, OutputStream out)
throws IOException, TransformerException throws IOException, TransformerException {
{
Source src = new DOMSource(doc); Source src = new DOMSource(doc);
Result dest = new StreamResult(out); Result dest = new StreamResult(out);
transformer.transform(src, dest); transformer.transform(src, dest);
} }
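The two methods pair naturally. A hedged sketch of building a document from a single hypothetical field and serializing it with a plain identity Transformer; the transformer setup is an assumption, not part of this class:

```java
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.util.Arrays;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import org.w3c.dom.Document;

public class WriteDublinCoreSketch {
    public static void main(String[] args) throws Exception {
        DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
        Document doc = MetadataUtilities.writeDublinCore(builder, Arrays.asList(
            DtoMetadata.create("dc", "title", null, "en", "A sample title")));  // hypothetical field

        Transformer transformer = TransformerFactory.newInstance().newTransformer(); // identity transform
        transformer.setOutputProperty(OutputKeys.INDENT, "yes");
        try (OutputStream out = new FileOutputStream("dublin_core.xml")) {
            MetadataUtilities.writeDocument(doc, transformer, out);
        }
    }
}
```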
// XML utility methods // XML utility methods
/** /**
* Lookup an attribute from a DOM node. * Lookup an attribute from a DOM node.
* @param n Node *
* @param n Node
* @param name name * @param name name
* @return attribute value * @return attribute value
*/ */
private static String getAttributeValue(Node n, String name) private static String getAttributeValue(Node n, String name) {
{
NamedNodeMap nm = n.getAttributes(); NamedNodeMap nm = n.getAttributes();
for (int i = 0; i < nm.getLength(); i++) for (int i = 0; i < nm.getLength(); i++) {
{
Node node = nm.item(i); Node node = nm.item(i);
if (name.equals(node.getNodeName())) if (name.equals(node.getNodeName())) {
{
return node.getNodeValue(); return node.getNodeValue();
} }
} }
@@ -357,19 +315,17 @@ public class MetadataUtilities {
/** /**
* Return the String value of a Node. * Return the String value of a Node.
*
* @param node node * @param node node
* @return string value * @return string value
*/ */
private static String getStringValue(Node node) private static String getStringValue(Node node) {
{
String value = node.getNodeValue(); String value = node.getNodeValue();
if (node.hasChildNodes()) if (node.hasChildNodes()) {
{
Node first = node.getFirstChild(); Node first = node.getFirstChild();
if (first.getNodeType() == Node.TEXT_NODE) if (first.getNodeType() == Node.TEXT_NODE) {
{
return first.getNodeValue(); return first.getNodeValue();
} }
} }
@@ -384,151 +340,127 @@ public class MetadataUtilities {
* @param f file * @param f file
* @return list of ContentsEntry * @return list of ContentsEntry
* @throws FileNotFoundException if file doesn't exist * @throws FileNotFoundException if file doesn't exist
* @throws IOException if IO error * @throws IOException if IO error
* @throws ParseException if parse error * @throws ParseException if parse error
*/ */
    public static List<ContentsEntry> readContentsFile(File f)
        throws FileNotFoundException, IOException, ParseException {
        List<ContentsEntry> list = new ArrayList<ContentsEntry>();

        BufferedReader in = null;

        try {
            in = new BufferedReader(new FileReader(f));
            String line = null;

            while ((line = in.readLine()) != null) {
                line = line.trim();
                if ("".equals(line)) {
                    continue;
                }
                ItemUpdate.pr("Contents entry: " + line);
                list.add(ContentsEntry.parse(line));
            }
        } finally {
            try {
                in.close();
            } catch (IOException e) {
                //skip
            }
        }

        return list;
    }
/** /**
*
* @param f file * @param f file
* @return list of lines as strings * @return list of lines as strings
* @throws FileNotFoundException if file doesn't exist * @throws FileNotFoundException if file doesn't exist
* @throws IOException if IO Error * @throws IOException if IO Error
*/ */
public static List<String> readDeleteContentsFile(File f) public static List<String> readDeleteContentsFile(File f)
throws FileNotFoundException, IOException throws FileNotFoundException, IOException {
{ List<String> list = new ArrayList<>();
List<String> list = new ArrayList<>();
BufferedReader in = null; BufferedReader in = null;
try try {
{ in = new BufferedReader(new FileReader(f));
in = new BufferedReader(new FileReader(f)); String line = null;
String line = null;
while ((line = in.readLine()) != null) while ((line = in.readLine()) != null) {
{ line = line.trim();
line = line.trim(); if ("".equals(line)) {
if ("".equals(line)) continue;
{ }
continue;
}
list.add(line); list.add(line);
} }
} } finally {
finally try {
{ in.close();
try } catch (IOException e) {
{ //skip
in.close(); }
} }
catch(IOException e)
{
//skip
}
}
return list; return list;
} }
/** /**
* Get display of Metadatum * Get display of Metadatum
* *
* @param dcv MetadataValue * @param dcv MetadataValue
* @return string displaying elements of the Metadatum * @return string displaying elements of the Metadatum
*/ */
public static String getDCValueString(MetadataValue dcv) public static String getDCValueString(MetadataValue dcv) {
{
MetadataField metadataField = dcv.getMetadataField(); MetadataField metadataField = dcv.getMetadataField();
MetadataSchema metadataSchema = metadataField.getMetadataSchema(); MetadataSchema metadataSchema = metadataField.getMetadataSchema();
return "schema: " + metadataSchema.getName() + "; element: " + metadataField.getElement() + "; qualifier: " + metadataField.getQualifier() + return "schema: " + metadataSchema.getName() + "; element: " + metadataField
"; language: " + dcv.getLanguage() + "; value: " + dcv.getValue(); .getElement() + "; qualifier: " + metadataField.getQualifier() +
"; language: " + dcv.getLanguage() + "; value: " + dcv.getValue();
} }
    /**
     * Return compound form of a metadata field (i.e. schema.element.qualifier)
     *
     * @param schema    schema
     * @param element   element
     * @param qualifier qualifier
     * @return a String representation of the two- or three-part form of a metadata element
     * e.g. dc.identifier.uri
     */
    public static String getCompoundForm(String schema, String element, String qualifier) {
        StringBuilder sb = new StringBuilder();
        sb.append(schema).append(".").append(element);

        if (qualifier != null) {
            sb.append(".").append(qualifier);
        }
        return sb.toString();
    }
    /**
     * Parses metadata field given in the form {@code <schema>.<element>[.<qualifier>|.*]}
     * checks for correct number of elements (2 or 3) and for empty strings
     *
     * @param compoundForm compound form of metadata field
     * @return String Array
     * @throws ParseException if validity checks fail
     */
    public static String[] parseCompoundForm(String compoundForm)
        throws ParseException {
        String[] ar = compoundForm.split("\\s*\\.\\s*"); //trim ends

        if ("".equals(ar[0])) {
            throw new ParseException("schema is empty string: " + compoundForm, 0);
        }

        if ((ar.length < 2) || (ar.length > 3) || "".equals(ar[1])) {
            throw new ParseException("element is malformed or empty string: " + compoundForm, 0);
        }

        return ar;
    }
}
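A brief sketch of round-tripping the compound notation handled by the two methods above; the field names are illustrative:

```java
import java.text.ParseException;

public class CompoundFormSketch {
    public static void main(String[] args) throws ParseException {
        String compound = MetadataUtilities.getCompoundForm("dc", "identifier", "uri");
        System.out.println(compound);                                                // dc.identifier.uri
        System.out.println(MetadataUtilities.getCompoundForm("dc", "title", null));  // dc.title

        String[] parts = MetadataUtilities.parseCompoundForm(compound);
        System.out.println(parts.length);          // 3: schema, element, qualifier

        // Inputs such as ".title" or just "dc" fail the validity checks with a ParseException.
    }
}
```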

View File

@@ -14,44 +14,36 @@ import org.dspace.content.Bitstream;
import org.dspace.content.Bundle; import org.dspace.content.Bundle;
/** /**
* Filter all bitstreams in the ORIGINAL bundle * Filter all bitstreams in the ORIGINAL bundle
* Also delete all derivative bitstreams, i.e. * Also delete all derivative bitstreams, i.e.
* all bitstreams in the TEXT and THUMBNAIL bundles * all bitstreams in the TEXT and THUMBNAIL bundles
*/ */
public class OriginalBitstreamFilter extends BitstreamFilterByBundleName {
    public OriginalBitstreamFilter() {
        //empty
    }

    /**
     * Tests bitstreams for containment in an ORIGINAL bundle
     *
     * @param bitstream Bitstream
     * @return true if the bitstream is in the ORIGINAL bundle
     * @throws BitstreamFilterException if filter error
     */
    @Override
    public boolean accept(Bitstream bitstream)
        throws BitstreamFilterException {
        try {
            List<Bundle> bundles = bitstream.getBundles();
            for (Bundle bundle : bundles) {
                if (bundle.getName().equals("ORIGINAL")) {
                    return true;
                }
            }
        } catch (SQLException e) {
            throw new BitstreamFilterException(e);
        }
        return false;
    }
}

View File

@@ -14,49 +14,40 @@ import org.dspace.content.Bitstream;
import org.dspace.content.Bundle; import org.dspace.content.Bundle;
/** /**
* Filter all bitstreams in the ORIGINAL bundle * Filter all bitstreams in the ORIGINAL bundle
* Also delete all derivative bitstreams, i.e. * Also delete all derivative bitstreams, i.e.
* all bitstreams in the TEXT and THUMBNAIL bundles * all bitstreams in the TEXT and THUMBNAIL bundles
*/ */
public class OriginalWithDerivativesBitstreamFilter extends BitstreamFilter {
    protected String[] bundlesToEmpty = {"ORIGINAL", "TEXT", "THUMBNAIL"};

    public OriginalWithDerivativesBitstreamFilter() {
        //empty
    }

    /**
     * Tests bitstream for membership in specified bundles (ORIGINAL, TEXT, THUMBNAIL)
     *
     * @param bitstream Bitstream
     * @return true if bitstream is in specified bundles
     * @throws BitstreamFilterException if error
     */
    @Override
    public boolean accept(Bitstream bitstream)
        throws BitstreamFilterException {
        try {
            List<Bundle> bundles = bitstream.getBundles();
            for (Bundle b : bundles) {
                for (String bn : bundlesToEmpty) {
                    if (b.getName().equals(bn)) {
                        return true;
                    }
                }
            }
        } catch (SQLException e) {
            throw new BitstreamFilterException(e);
        }
        return false;
    }
}

View File

@@ -10,15 +10,13 @@ package org.dspace.app.itemupdate;
import java.util.Properties; import java.util.Properties;
/** /**
* Bitstream filter targetting the THUMBNAIL bundle * Bitstream filter targetting the THUMBNAIL bundle
*
*/ */
public class ThumbnailBitstreamFilter extends BitstreamFilterByBundleName {

    public ThumbnailBitstreamFilter() {
        props = new Properties();
        props.setProperty("bundle", "THUMBNAIL");
    }
}
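For comparison, a purely hypothetical subclass shows how BitstreamFilterByBundleName is parameterised for another bundle; the LICENSE bundle name is only an example and this class is not part of the codebase:

```java
import java.util.Properties;

// Hypothetical filter following the same pattern as ThumbnailBitstreamFilter above.
public class LicenseBitstreamFilter extends BitstreamFilterByBundleName {
    public LicenseBitstreamFilter() {
        props = new Properties();
        props.setProperty("bundle", "LICENSE");
    }
}
```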

View File

@@ -12,24 +12,22 @@ import org.dspace.content.service.ItemService;
import org.dspace.core.Context; import org.dspace.core.Context;
/** /**
* Interface for actions to update an item * Interface for actions to update an item
*
*/ */
public interface UpdateAction public interface UpdateAction {
{
public ItemService itemService = ContentServiceFactory.getInstance().getItemService(); public ItemService itemService = ContentServiceFactory.getInstance().getItemService();
/** /**
* Action to update item * Action to update item
* *
* @param context DSpace context * @param context DSpace context
* @param itarch item archive * @param itarch item archive
* @param isTest test flag * @param isTest test flag
* @param suppressUndo undo flag * @param suppressUndo undo flag
* @throws Exception if error * @throws Exception if error
*/ */
public void execute(Context context, ItemArchive itarch, boolean isTest, boolean suppressUndo) public void execute(Context context, ItemArchive itarch, boolean isTest, boolean suppressUndo)
throws Exception; throws Exception;
} }
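A hedged sketch of a minimal implementation of this interface; the class is hypothetical, and the shipped actions instead extend UpdateMetadataAction or UpdateBitstreamsAction shown below:

```java
import org.dspace.core.Context;

// Hypothetical action that only reports what it sees; a real action would modify the item.
public class LogOnlyAction implements UpdateAction {
    @Override
    public void execute(Context context, ItemArchive itarch, boolean isTest, boolean suppressUndo)
        throws Exception {
        ItemUpdate.pr("Visited item " + itarch.getItem().getHandle()
                          + (isTest ? " (test run)" : ""));
        // A real action would use itemService here and record undo data unless suppressUndo is set.
    }
}
```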

View File

@@ -12,36 +12,32 @@ import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.BundleService; import org.dspace.content.service.BundleService;
/** /**
* Base class for Bitstream actions * Base class for Bitstream actions
*
*
*/ */
public abstract class UpdateBitstreamsAction implements UpdateAction { public abstract class UpdateBitstreamsAction implements UpdateAction {
protected boolean alterProvenance = true; protected boolean alterProvenance = true;
protected BundleService bundleService = ContentServiceFactory.getInstance().getBundleService(); protected BundleService bundleService = ContentServiceFactory.getInstance().getBundleService();
protected BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); protected BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
/** /**
* Set variable to indicate that the dc.description.provenance field may * Set variable to indicate that the dc.description.provenance field may
* be changed as a result of Bitstream changes by ItemUpdate * be changed as a result of Bitstream changes by ItemUpdate
* @param alterProvenance whether to alter provenance *
*/ * @param alterProvenance whether to alter provenance
public void setAlterProvenance(boolean alterProvenance) */
{ public void setAlterProvenance(boolean alterProvenance) {
this.alterProvenance = alterProvenance; this.alterProvenance = alterProvenance;
} }
/** /**
* * @return boolean value to indicate whether the dc.description.provenance field may
* @return boolean value to indicate whether the dc.description.provenance field may * be changed as a result of Bitstream changes by ItemUpdate
* be changed as a result of Bitstream changes by ItemUpdate */
*/ public boolean getAlterProvenance() {
public boolean getAlterProvenance() return alterProvenance;
{ }
return alterProvenance;
}
} }

View File

@@ -11,60 +11,57 @@ import java.util.HashSet;
import java.util.Set; import java.util.Set;
/** /**
* This abstract subclass for metadata actions * This abstract subclass for metadata actions
* maintains a collection for the target metadata fields * maintains a collection for the target metadata fields
* expressed as a string in the compound notation ( {@code <schema>.<element>.<qualifier>} ) * expressed as a string in the compound notation ( {@code <schema>.<element>.<qualifier>} )
* on which to apply the action when the method execute is called. * on which to apply the action when the method execute is called.
*
* Implemented as a Set to avoid problems with duplicates
*
* *
* Implemented as a Set to avoid problems with duplicates
*/ */
public abstract class UpdateMetadataAction implements UpdateAction { public abstract class UpdateMetadataAction implements UpdateAction {
protected Set<String> targetFields = new HashSet<String>(); protected Set<String> targetFields = new HashSet<String>();
/** /**
* Get target fields * Get target fields
* *
* @return set of fields to update * @return set of fields to update
*/ */
public Set<String> getTargetFields() { public Set<String> getTargetFields() {
return targetFields; return targetFields;
} }
/** /**
* Set target fields * Set target fields
* *
* @param targetFields Set of target fields to update * @param targetFields Set of target fields to update
*/ */
public void addTargetFields(Set<String> targetFields) { public void addTargetFields(Set<String> targetFields) {
for (String tf : targetFields) for (String tf : targetFields) {
{ this.targetFields.add(tf);
this.targetFields.add(tf); }
}
} }
/** /**
* Add array of target fields to update * Add array of target fields to update
* @param targetFields array of target fields to update *
*/ * @param targetFields array of target fields to update
public void addTargetFields(String[] targetFields) { */
for (String tf : targetFields) public void addTargetFields(String[] targetFields) {
{ for (String tf : targetFields) {
this.targetFields.add(tf); this.targetFields.add(tf);
} }
} }
/** /**
* Add single field to update * Add single field to update
* *
* @param targetField target field to update * @param targetField target field to update
*/ */
public void addTargetField(String targetField) { public void addTargetField(String targetField) {
this.targetFields.add(targetField); this.targetFields.add(targetField);
} }
} }
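A short sketch of how targets in the compound notation are registered on a concrete action. The ActionManager type and field names mirror how ItemUpdate.main() wires things up, and the metadata fields are invented:

```java
// Sketch only: actionMgr is assumed to be the ActionManager held by ItemUpdate.
static void registerTargets(ActionManager actionMgr) throws Exception {
    AddMetadataAction addAction =
        (AddMetadataAction) actionMgr.getUpdateAction(AddMetadataAction.class);
    addAction.addTargetFields(new String[] {"dc.description.abstract", "dc.subject"}); // hypothetical fields
    addAction.addTargetField("dc.subject");   // duplicate collapses: targets are kept in a Set
}
```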

View File

@@ -15,29 +15,29 @@ import java.io.Reader;
import java.io.StreamTokenizer;
import java.util.ArrayList;
import java.util.List;

import org.jdom.Document;

/**
 * @author mwood
 */
public class CommandRunner {

    /**
     * Default constructor
     */
    private CommandRunner() { }

    /**
     * @param args the command line arguments given
     * @throws IOException if IO error
     * @throws FileNotFoundException if file doesn't exist
     */
    public static void main(String[] args)
        throws FileNotFoundException, IOException {
        if (args.length > 0) {
            runManyCommands(args[0]);
        } else {
            runManyCommands("-");
        }
        // There is no sensible way to use the status returned by runManyCommands().
@@ -54,19 +54,15 @@ public class CommandRunner
     *
     * @param script the file of command lines to be executed.
     * @return status code
     * @throws IOException if IO error
     * @throws FileNotFoundException if file doesn't exist
     */
    static int runManyCommands(String script)
        throws FileNotFoundException, IOException {
        Reader input;
        if ("-".equals(script)) {
            input = new InputStreamReader(System.in);
        } else {
            input = new FileReader(script);
        }
@@ -89,22 +85,16 @@ public class CommandRunner
        int status = 0;
        List<String> tokens = new ArrayList<String>();
        Document commandConfigs = ScriptLauncher.getConfig();
        while (StreamTokenizer.TT_EOF != tokenizer.nextToken()) {
            if (StreamTokenizer.TT_EOL == tokenizer.ttype) {
                if (tokens.size() > 0) {
                    status = ScriptLauncher.runOneCommand(commandConfigs, tokens.toArray(new String[tokens.size()]));
                    if (status > 0) {
                        break;
                    }
                    tokens.clear();
                }
            } else {
                tokens.add(tokenizer.sval);
            }
        }
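The loop above consumes a line-oriented script: each line is tokenized into one argument vector and handed to ScriptLauncher.runOneCommand(), with "-" meaning standard input. A standalone sketch of that line-per-command convention follows; the command names are examples only, and this sketch does not invoke the launcher itself.

import java.io.BufferedReader;
import java.io.StringReader;
import java.util.Arrays;

// Each line of a command script becomes one launcher invocation.
public class CommandScriptSketch {
    public static void main(String[] args) throws Exception {
        String script = "index-discovery -b\nfilter-media -v\n";
        try (BufferedReader reader = new BufferedReader(new StringReader(script))) {
            String line;
            while ((line = reader.readLine()) != null) {
                String[] tokens = line.trim().split("\\s+");
                System.out.println("would run: " + Arrays.toString(tokens));
            }
        }
    }
}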
View File
@@ -12,6 +12,15 @@ import java.io.IOException;
import java.lang.reflect.Method;
import java.util.List;
import java.util.TreeMap;

import org.apache.commons.cli.ParseException;
import org.apache.log4j.Logger;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.dspace.scripts.factory.ScriptServiceFactory;
import org.dspace.scripts.handler.DSpaceRunnableHandler;
import org.dspace.scripts.handler.impl.CommandLineDSpaceRunnableHandler;
import org.dspace.scripts.service.ScriptService;
import org.dspace.servicemanager.DSpaceKernelImpl;
import org.dspace.servicemanager.DSpaceKernelInit;
import org.dspace.services.RequestService;
@@ -25,38 +34,41 @@ import org.jdom.input.SAXBuilder;
 * @author Stuart Lewis
 * @author Mark Diggory
 */
public class ScriptLauncher {

    private static final Logger log = Logger.getLogger(ScriptLauncher.class);

    /**
     * The service manager kernel
     */
    private static transient DSpaceKernelImpl kernelImpl;

    /**
     * Default constructor
     */
    private ScriptLauncher() {
    }

    /**
     * Execute the DSpace script launcher
     *
     * @param args Any parameters required to be passed to the scripts it executes
     * @throws IOException if IO error
     * @throws FileNotFoundException if file doesn't exist
     */
    public static void main(String[] args)
        throws FileNotFoundException, IOException, IllegalAccessException, InstantiationException {
        // Initialise the service manager kernel
        try {
            kernelImpl = DSpaceKernelInit.getKernel(null);
            if (!kernelImpl.isRunning()) {
                kernelImpl.start();
            }
        } catch (Exception e) {
            // Failed to start so destroy it and log and throw an exception
            try {
                kernelImpl.destroy();
            } catch (Exception e1) {
                // Nothing to do
            }
            String message = "Failure during kernel init: " + e.getMessage();
@@ -69,50 +81,98 @@ public class ScriptLauncher
        Document commandConfigs = getConfig();

        // Check that there is at least one argument (if not display command options)
        if (args.length < 1) {
            System.err.println("You must provide at least one command argument");
            display(commandConfigs);
            System.exit(1);
        }

        // Look up command in the configuration, and execute.
        CommandLineDSpaceRunnableHandler commandLineDSpaceRunnableHandler = new CommandLineDSpaceRunnableHandler();
        int status = handleScript(args, commandConfigs, commandLineDSpaceRunnableHandler, kernelImpl);

        // Destroy the service kernel if it is still alive
        if (kernelImpl != null) {
            kernelImpl.destroy();
            kernelImpl = null;
        }

        System.exit(status);
    }

    /**
     * This method will take the arguments from a commandline input and it'll find the script that the first argument
     * refers to and it'll execute this script.
     * It can return a 1 or a 0 depending on whether the script failed or passed respectively
     * @param args The arguments for the script and the script as first one in the array
     * @param commandConfigs The Document
     * @param dSpaceRunnableHandler The DSpaceRunnableHandler for this execution
     * @param kernelImpl The relevant DSpaceKernelImpl
     * @return A 1 or 0 depending on whether the script failed or passed respectively
     */
    public static int handleScript(String[] args, Document commandConfigs,
                                   DSpaceRunnableHandler dSpaceRunnableHandler,
                                   DSpaceKernelImpl kernelImpl) throws InstantiationException, IllegalAccessException {
        int status;
        ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService();
        ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(args[0]);
        DSpaceRunnable script = null;
        if (scriptConfiguration != null) {
            script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration);
        }
        if (script != null) {
            status = executeScript(args, dSpaceRunnableHandler, script);
        } else {
            status = runOneCommand(commandConfigs, args, kernelImpl);
        }
        return status;
    }

    /**
     * This method will simply execute the script
     * @param args The arguments of the script with the script name as first place in the array
     * @param dSpaceRunnableHandler The relevant DSpaceRunnableHandler
     * @param script The script to be executed
     * @return A 1 or 0 depending on whether the script failed or passed respectively
     */
    private static int executeScript(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler,
                                     DSpaceRunnable script) {
        try {
            script.initialize(args, dSpaceRunnableHandler, null);
            script.run();
            return 0;
        } catch (ParseException e) {
            script.printHelp();
            e.printStackTrace();
            return 1;
        }
    }

    protected static int runOneCommand(Document commandConfigs, String[] args) {
        return runOneCommand(commandConfigs, args, kernelImpl);
    }

    /**
     * Recognize and execute a single command.
     *
     * @param commandConfigs Document
     * @param args the command line arguments given
     */
    protected static int runOneCommand(Document commandConfigs, String[] args, DSpaceKernelImpl kernelImpl) {
        String request = args[0];
        Element root = commandConfigs.getRootElement();
        List<Element> commands = root.getChildren("command");
        Element command = null;
        for (Element candidate : commands) {
            if (request.equalsIgnoreCase(candidate.getChild("name").getValue())) {
                command = candidate;
                break;
            }
        }

        if (null == command) {
            // The command wasn't found
            System.err.println("Command not found: " + args[0]);
            display(commandConfigs);
@@ -121,33 +181,26 @@ public class ScriptLauncher
        // Run each step
        List<Element> steps = command.getChildren("step");
        for (Element step : steps) {
            // Instantiate the class
            Class target = null;

            // Is it the special case 'dsrun' where the user provides the class name?
            String className;
            if ("dsrun".equals(request)) {
                if (args.length < 2) {
                    System.err.println("Error in launcher.xml: Missing class name");
                    return 1;
                }
                className = args[1];
            } else {
                className = step.getChild("class").getValue();
            }
            try {
                target = Class.forName(className,
                                       true,
                                       Thread.currentThread().getContextClassLoader());
            } catch (ClassNotFoundException e) {
                System.err.println("Error in launcher.xml: Invalid class name: " + className);
                return 1;
            }
@@ -158,26 +211,20 @@ public class ScriptLauncher
            Class[] argTypes = {useargs.getClass()};
            boolean passargs = true;
            if ((step.getAttribute("passuserargs") != null) &&
                ("false".equalsIgnoreCase(step.getAttribute("passuserargs").getValue()))) {
                passargs = false;
            }
            if ((args.length == 1) || (("dsrun".equals(request)) && (args.length == 2)) || (!passargs)) {
                useargs = new String[0];
            } else {
                // The number of arguments to ignore
                // If dsrun is the command, ignore the next, as it is the class name not an arg
                int x = 1;
                if ("dsrun".equals(request)) {
                    x = 2;
                }
                String[] argsnew = new String[useargs.length - x];
                for (int i = x; i < useargs.length; i++) {
                    argsnew[i - x] = useargs[i];
                }
                useargs = argsnew;
@@ -185,16 +232,13 @@ public class ScriptLauncher
            // Add any extra properties
            List<Element> bits = step.getChildren("argument");
            if (step.getChild("argument") != null) {
                String[] argsnew = new String[useargs.length + bits.size()];
                int i = 0;
                for (Element arg : bits) {
                    argsnew[i++] = arg.getValue();
                }
                for (; i < bits.size() + useargs.length; i++) {
                    argsnew[i] = useargs[i - bits.size()];
                }
                useargs = argsnew;
@@ -202,11 +246,10 @@ public class ScriptLauncher
            // Establish the request service startup
            RequestService requestService = kernelImpl.getServiceManager().getServiceByName(
                RequestService.class.getName(), RequestService.class);
            if (requestService == null) {
                throw new IllegalStateException(
                    "Could not get the DSpace RequestService to start the request transaction");
            }

            // Establish a request related to the current session
@@ -214,26 +257,23 @@ public class ScriptLauncher
            requestService.startRequest();

            // Run the main() method
            try {
                Object[] arguments = {useargs};

                // Useful for debugging, so left in the code...
                /**System.out.print("About to execute: " + className);
                for (String param : useargs)
                {
                    System.out.print(" " + param);
                }
                System.out.println("");**/

                Method main = target.getMethod("main", argTypes);
                main.invoke(null, arguments);

                // ensure we close out the request (happy request)
                requestService.endRequest(null);
            } catch (Exception e) {
                // Failure occurred in the request so we destroy it
                requestService.endRequest(e);
@@ -254,20 +294,20 @@ public class ScriptLauncher
     *
     * @return The XML configuration file Document
     */
    protected static Document getConfig() {
        return getConfig(kernelImpl);
    }

    public static Document getConfig(DSpaceKernelImpl kernelImpl) {
        // Load the launcher configuration file
        String config = kernelImpl.getConfigurationService().getProperty("dspace.dir") +
            System.getProperty("file.separator") + "config" +
            System.getProperty("file.separator") + "launcher.xml";
        SAXBuilder saxBuilder = new SAXBuilder();
        Document doc = null;
        try {
            doc = saxBuilder.build(config);
        } catch (Exception e) {
            System.err.println("Unable to load the launcher configuration file: [dspace]/config/launcher.xml");
            System.err.println(e.getMessage());
            e.printStackTrace();
@@ -278,10 +318,10 @@ public class ScriptLauncher
    /**
     * Display the commands that the current launcher config file knows about
     *
     * @param commandConfigs configs as Document
     */
    private static void display(Document commandConfigs) {
        // List all command elements
        List<Element> commands = commandConfigs.getRootElement().getChildren("command");
@@ -289,17 +329,15 @@ public class ScriptLauncher
        // We cannot just use commands.sort() because it tries to remove and
        // reinsert Elements within other Elements, and that doesn't work.
        TreeMap<String, Element> sortedCommands = new TreeMap<>();
        for (Element command : commands) {
            sortedCommands.put(command.getChild("name").getValue(), command);
        }

        // Display the sorted list
        System.out.println("Usage: dspace [command-name] {parameters}");
        for (Element command : sortedCommands.values()) {
            System.out.println(" - " + command.getChild("name").getValue() +
                               ": " + command.getChild("description").getValue());
        }
    }
}
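Putting the pieces above together, a hedged sketch of how a caller could drive the new dispatch path directly: script names registered through a ScriptConfiguration run as DSpaceRunnables, and anything else falls back to the legacy launcher.xml lookup via runOneCommand(). The argument values are examples, and the org.dspace.app.launcher package for ScriptLauncher is assumed rather than shown in this hunk.

import org.dspace.app.launcher.ScriptLauncher;
import org.dspace.scripts.handler.impl.CommandLineDSpaceRunnableHandler;
import org.dspace.servicemanager.DSpaceKernelImpl;
import org.dspace.servicemanager.DSpaceKernelInit;
import org.jdom.Document;

// Sketch only: mirrors main() above, minus its error handling.
public class LauncherSketch {
    public static void main(String[] args) throws Exception {
        DSpaceKernelImpl kernel = DSpaceKernelInit.getKernel(null);
        if (!kernel.isRunning()) {
            kernel.start();
        }
        try {
            Document commandConfigs = ScriptLauncher.getConfig(kernel);
            String[] scriptArgs = {"metadata-export", "-f", "export.csv"};   // example arguments
            int status = ScriptLauncher.handleScript(
                scriptArgs, commandConfigs, new CommandLineDSpaceRunnableHandler(), kernel);
            System.out.println("exit status: " + status);
        } finally {
            kernel.destroy();
        }
    }
}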
View File
@@ -7,12 +7,12 @@
 */
package org.dspace.app.mediafilter;

import java.awt.Color;
import java.awt.Font;
import java.awt.FontMetrics;
import java.awt.Graphics2D;
import java.awt.Rectangle;
import java.awt.image.BufferedImage;

/**
 * Class to attach a footer to an image using ImageMagick.
@@ -20,143 +20,117 @@ import java.awt.Rectangle;
 * This version of the code is basically Ninh's but reorganised a little. Used with permission.
 */
public class Brand {
    private int brandWidth;
    private int brandHeight;
    private Font font;
    private int xOffset;

    /**
     * Constructor to set up footer image attributes.
     *
     * @param brandWidth  length of the footer in pixels
     * @param brandHeight height of the footer in pixels
     * @param font        font to use for text on the footer
     * @param xOffset     number of pixels text should be indented from left-hand side of footer
     */
    public Brand(int brandWidth,
                 int brandHeight,
                 Font font,
                 int xOffset) {
        this.brandWidth = brandWidth;
        this.brandHeight = brandHeight;
        this.font = font;
        this.xOffset = xOffset;
    }

    /**
     * Create the brand image
     *
     * @param brandLeftText  text that should appear in the bottom left of the image
     * @param shortLeftText  abbreviated form of brandLeftText that will be substituted if
     *                       the image is resized such that brandLeftText will not fit. <code>null</code> if not
     *                       required
     * @param brandRightText text that should appear in the bottom right of the image
     * @return BufferedImage a BufferedImage object describing the brand image file
     */
    public BufferedImage create(String brandLeftText,
                                String shortLeftText,
                                String brandRightText) {
        BrandText[] allBrandText = null;

        BufferedImage brandImage =
            new BufferedImage(brandWidth, brandHeight, BufferedImage.TYPE_INT_RGB);

        if (brandWidth >= 350) {
            allBrandText = new BrandText[] {
                new BrandText(BrandText.BL, brandLeftText),
                new BrandText(BrandText.BR, brandRightText)
            };
        } else if (brandWidth >= 190) {
            allBrandText = new BrandText[] {
                new BrandText(BrandText.BL, shortLeftText),
                new BrandText(BrandText.BR, brandRightText)
            };
        } else {
            allBrandText = new BrandText[] {
                new BrandText(BrandText.BR, brandRightText)
            };
        }

        if (allBrandText != null && allBrandText.length > 0) {
            for (int i = 0; i < allBrandText.length; ++i) {
                drawImage(brandImage, allBrandText[i]);
            }
        }

        return brandImage;
    }

    /**
     * do the text placements and preparatory work for the brand image generation
     *
     * @param brandImage a BufferedImage object where the image is created
     * @param brandText  an Identifier object describing what text is to be placed in what
     *                   position within the brand
     */
    private void drawImage(BufferedImage brandImage,
                           BrandText brandText) {
        int imgWidth = brandImage.getWidth();
        int imgHeight = brandImage.getHeight();

        Graphics2D g2 = brandImage.createGraphics();
        g2.setFont(font);
        FontMetrics fm = g2.getFontMetrics();

        int bWidth = fm.stringWidth(brandText.getText()) + xOffset * 2 + 1;
        int bHeight = fm.getHeight();

        int bx = 0;
        int by = 0;

        if (brandText.getLocation().equals(BrandText.TL)) {
            bx = 0;
            by = 0;
        } else if (brandText.getLocation().equals(BrandText.TR)) {
            bx = imgWidth - bWidth;
            by = 0;
        } else if (brandText.getLocation().equals(BrandText.BL)) {
            bx = 0;
            by = imgHeight - bHeight;
        } else if (brandText.getLocation().equals(BrandText.BR)) {
            bx = imgWidth - bWidth;
            by = imgHeight - bHeight;
        }

        Rectangle box = new Rectangle(bx, by, bWidth, bHeight);
        int tx = bx + xOffset;
        int ty = by + fm.getAscent();

        g2.setColor(Color.black);
        g2.fill(box);
        g2.setColor(Color.white);
        g2.drawString(brandText.getText(), tx, ty);
    }
}
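A small usage sketch for the Brand class above; the dimensions, font and texts are arbitrary example values, and the sketch assumes Brand is visible (e.g. from the same org.dspace.app.mediafilter package). At widths of 350 pixels or more both the left-hand and right-hand texts are drawn, as in create().

import java.awt.Font;
import java.awt.image.BufferedImage;
import java.io.File;
import javax.imageio.ImageIO;

// Build a 350x20 footer strip and write it out as a JPEG.
public class BrandSketch {
    public static void main(String[] args) throws Exception {
        Brand brand = new Brand(350, 20, new Font("SansSerif", Font.PLAIN, 12), 5);
        BufferedImage footer = brand.create("Example Repository preview",
                                            "Example Repo",
                                            "hdl:123456789/1");
        ImageIO.write(footer, "jpeg", new File("brand-footer.jpg"));
    }
}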
View File
@@ -13,73 +13,75 @@ package org.dspace.app.mediafilter;
 * This is a copy of Picture Australia's PiObj class re-organised with methods.
 * Thanks to Ninh Nguyen at the National Library for providing the original source.
 */
class BrandText {
    /**
     * Bottom Left
     */
    public static final String BL = "bl";
    /**
     * Bottom Right
     */
    public static final String BR = "br";
    /**
     * Top Left
     */
    public static final String TL = "tl";
    /**
     * Top Right
     */
    public static final String TR = "tr";

    private String location;
    private String text;

    /**
     * Constructor for an Identifier object containing a text string and
     * its location within a rectangular area.
     *
     * @param location one of the class location constants e.g. <code>Identifier.BL</code>
     * @param text     the text associated with the location
     */
    public BrandText(String location, String text) {
        this.location = location;
        this.text = text;
    }

    /**
     * get the location the text of the Identifier object is associated with
     *
     * @return String one the class location constants e.g. <code>Identifier.BL</code>
     */
    public String getLocation() {
        return location;
    }

    /**
     * get the text associated with the Identifier object
     *
     * @return String the text associated with the Identifier object
     */
    public String getText() {
        return text;
    }

    /**
     * set the location associated with the Identifier object
     *
     * @param location one of the class location constants
     */
    public void setLocation(String location) {
        this.location = location;
    }

    /**
     * set the text associated with the Identifier object
     *
     * @param text any text string (typically a branding or identifier)
     */
    public void setText(String text) {
        this.text = text;
    }
}
View File
@@ -7,16 +7,13 @@
 */
package org.dspace.app.mediafilter;

import java.awt.image.BufferedImage;
import java.io.InputStream;
import javax.imageio.ImageIO;

import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;

/**
 * Filter image bitstreams, scaling the image to be within the bounds of
 * thumbnail.maxwidth, thumbnail.maxheight, the size we want our thumbnail to be
@@ -24,21 +21,17 @@ import org.dspace.app.mediafilter.JPEGFilter;
 *
 * @author Jason Sherman jsherman@usao.edu
 */
public class BrandedPreviewJPEGFilter extends MediaFilter {
    @Override
    public String getFilteredName(String oldFilename) {
        return oldFilename + ".preview.jpg";
    }

    /**
     * @return String bundle name
     */
    @Override
    public String getBundleName() {
        return "BRANDED_PREVIEW";
    }
@@ -46,8 +39,7 @@ public class BrandedPreviewJPEGFilter extends MediaFilter
     * @return String bitstreamformat
     */
    @Override
    public String getFormatString() {
        return "JPEG";
    }
@@ -55,42 +47,40 @@ public class BrandedPreviewJPEGFilter extends MediaFilter
     * @return String description
     */
    @Override
    public String getDescription() {
        return "Generated Branded Preview";
    }

    /**
     * @param currentItem item
     * @param source      source input stream
     * @param verbose     verbose mode
     * @return InputStream the resulting input stream
     * @throws Exception if error
     */
    @Override
    public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
        throws Exception {
        // read in bitstream's image
        BufferedImage buf = ImageIO.read(source);

        // get config params
        float xmax = (float) ConfigurationManager
            .getIntProperty("webui.preview.maxwidth");
        float ymax = (float) ConfigurationManager
            .getIntProperty("webui.preview.maxheight");
        boolean blurring = (boolean) ConfigurationManager
            .getBooleanProperty("webui.preview.blurring");
        boolean hqscaling = (boolean) ConfigurationManager
            .getBooleanProperty("webui.preview.hqscaling");
        int brandHeight = ConfigurationManager.getIntProperty("webui.preview.brand.height");
        String brandFont = ConfigurationManager.getProperty("webui.preview.brand.font");
        int brandFontPoint = ConfigurationManager.getIntProperty("webui.preview.brand.fontpoint");

        JPEGFilter jpegFilter = new JPEGFilter();
        return jpegFilter
            .getThumbDim(currentItem, buf, verbose, xmax, ymax, blurring, hqscaling, brandHeight, brandFontPoint,
                         brandFont);
    }
}
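The scaling itself is delegated to JPEGFilter.getThumbDim(), which is not part of this hunk. As a rough, standalone illustration of the "fit within max width/height" rule that the webui.preview.* properties configure, here is a sketch; the plain Graphics2D scaling and the absence of the blurring/HQ steps are simplifications of this example, not the JPEGFilter implementation.

import java.awt.Graphics2D;
import java.awt.image.BufferedImage;

// Scale an image down (never up) so it fits inside xmax by ymax.
public class ScaleToBoundsSketch {
    static BufferedImage scaleToBounds(BufferedImage src, float xmax, float ymax) {
        float scale = Math.min(1.0f, Math.min(xmax / src.getWidth(), ymax / src.getHeight()));
        int w = Math.max(1, Math.round(src.getWidth() * scale));
        int h = Math.max(1, Math.round(src.getHeight() * scale));
        BufferedImage out = new BufferedImage(w, h, BufferedImage.TYPE_INT_RGB);
        Graphics2D g2 = out.createGraphics();
        g2.drawImage(src, 0, 0, w, h, null);   // simple scaling; no blurring/HQ pass
        g2.dispose();
        return out;
    }
}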
View File
@@ -11,12 +11,11 @@ import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.IOUtils;
import org.apache.logging.log4j.Logger;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.hssf.extractor.ExcelExtractor;
import org.apache.poi.xssf.extractor.XSSFExcelExtractor;
import org.dspace.content.Item;

/*
@@ -35,79 +34,62 @@ import org.dspace.content.Item;
 * filter.org.dspace.app.mediafilter.ExcelFilter.inputFormats = Microsoft Excel, Microsoft Excel XML
 *
 */
public class ExcelFilter extends MediaFilter {

    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(ExcelFilter.class);

    public String getFilteredName(String oldFilename) {
        return oldFilename + ".txt";
    }

    /**
     * @return String bundle name
     */
    public String getBundleName() {
        return "TEXT";
    }

    /**
     * @return String bitstream format
     */
    public String getFormatString() {
        return "Text";
    }

    /**
     * @return String description
     */
    public String getDescription() {
        return "Extracted text";
    }

    /**
     * @param item    item
     * @param source  source input stream
     * @param verbose verbose mode
     * @return InputStream the resulting input stream
     * @throws Exception if error
     */
    @Override
    public InputStream getDestinationStream(Item item, InputStream source, boolean verbose)
        throws Exception {
        String extractedText = null;

        try {
            POITextExtractor theExtractor = ExtractorFactory.createExtractor(source);
            if (theExtractor instanceof ExcelExtractor) {
                // for xls file
                extractedText = (theExtractor).getText();
            } else if (theExtractor instanceof XSSFExcelExtractor) {
                // for xlsx file
                extractedText = (theExtractor).getText();
            }
        } catch (Exception e) {
            log.error("Error filtering bitstream: " + e.getMessage(), e);
            throw e;
        }

        if (extractedText != null) {
            // generate an input stream with the extracted text
            return IOUtils.toInputStream(extractedText, StandardCharsets.UTF_8);
        }
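A minimal sketch of the extraction path, using the same POI entry point the filter above imports; the file name is an example only, and the POI version available on the classpath is assumed to match the one this module builds against.

import java.io.FileInputStream;
import java.io.InputStream;

import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;

// Pull plain text out of a local spreadsheet file.
public class ExcelTextSketch {
    public static void main(String[] args) throws Exception {
        try (InputStream in = new FileInputStream("report.xlsx")) {
            POITextExtractor extractor = ExtractorFactory.createExtractor(in);
            System.out.println(extractor.getText());
        }
    }
}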
View File
@@ -18,50 +18,46 @@ import org.dspace.core.Context;
 * from one format to another. This interface should be implemented by any class
 * which defines a "filter" to be run by the MediaFilterManager.
 */
public interface FormatFilter {
    /**
     * Get a filename for a newly created filtered bitstream
     *
     * @param sourceName name of source bitstream
     * @return filename generated by the filter - for example, document.pdf
     * becomes document.pdf.txt
     */
    public String getFilteredName(String sourceName);

    /**
     * @return name of the bundle this filter will stick its generated
     * Bitstreams
     */
    public String getBundleName();

    /**
     * @return name of the bitstream format (say "HTML" or "Microsoft Word")
     * returned by this filter look in the bitstream format registry or
     * mediafilter.cfg for valid format strings.
     */
    public String getFormatString();

    /**
     * @return string to describe the newly-generated Bitstream - how it was
     * produced is a good idea
     */
    public String getDescription();

    /**
     * Read the source stream and produce the filtered content.
     *
     * @param item    Item
     * @param source  input stream
     * @param verbose verbosity flag
     * @return result of filter's transformation as a byte stream.
     * @throws Exception if error
     */
    public InputStream getDestinationStream(Item item, InputStream source, boolean verbose)
        throws Exception;

    /**
     * Perform any pre-processing of the source bitstream *before* the actual
@@ -71,18 +67,16 @@ public interface FormatFilter
     * is necessary). Return false if bitstream should be skipped
     * for any reason.
     *
     * @param c       context
     * @param item    item containing bitstream to process
     * @param source  source bitstream to be processed
     * @param verbose verbose mode
     * @return true if bitstream processing should continue,
     * false if this bitstream should be skipped
     * @throws Exception if error
     */
    public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose)
        throws Exception;

    /**
     * Perform any post-processing of the generated bitstream *after* this
@@ -92,17 +86,13 @@ public interface FormatFilter
     * is necessary). Return false if bitstream should be skipped
     * for some reason.
     *
     * @param c                  context
     * @param item               item containing bitstream to process
     * @param generatedBitstream the bitstream which was generated by
     *                           this filter.
     * @throws Exception if error
     */
    public void postProcessBitstream(Context c, Item item, Bitstream generatedBitstream)
        throws Exception;
}
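To make the contract concrete, a minimal illustrative implementation follows. It extends MediaFilter (as the filters in this changeset do) so the pre/post-processing hooks keep their default behaviour; the class name, bundle, format string and upper-casing transformation are all example values for this sketch, not part of DSpace.

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.dspace.app.mediafilter.MediaFilter;
import org.dspace.content.Item;

// Example filter: copies the source text into the TEXT bundle, upper-cased.
public class UpperCaseTextFilter extends MediaFilter {
    @Override
    public String getFilteredName(String oldFilename) {
        return oldFilename + ".txt";
    }

    @Override
    public String getBundleName() {
        return "TEXT";
    }

    @Override
    public String getFormatString() {
        return "Text";
    }

    @Override
    public String getDescription() {
        return "Upper-cased text (example)";
    }

    @Override
    public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
        throws Exception {
        String text = new String(source.readAllBytes(), StandardCharsets.UTF_8);
        return new ByteArrayInputStream(text.toUpperCase().getBytes(StandardCharsets.UTF_8));
    }
}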
View File
@@ -7,36 +7,31 @@
 */
package org.dspace.app.mediafilter;

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import javax.swing.text.Document;
import javax.swing.text.html.HTMLEditorKit;

import org.dspace.content.Item;

/*
 *
 * to do: helpful error messages - can't find mediafilter.cfg - can't
 * instantiate filter - bitstream format doesn't exist
 *
 */
public class HTMLFilter extends MediaFilter {

    @Override
    public String getFilteredName(String oldFilename) {
        return oldFilename + ".txt";
    }

    /**
     * @return String bundle name
     */
    @Override
    public String getBundleName() {
        return "TEXT";
    }
@@ -44,8 +39,7 @@ public class HTMLFilter extends MediaFilter
     * @return String bitstreamformat
     */
    @Override
    public String getFormatString() {
        return "Text";
    }
@@ -53,23 +47,20 @@ public class HTMLFilter extends MediaFilter
     * @return String description
     */
    @Override
    public String getDescription() {
        return "Extracted text";
    }

    /**
     * @param currentItem item
     * @param source      source input stream
     * @param verbose     verbose mode
     * @return InputStream the resulting input stream
     * @throws Exception if error
     */
    @Override
    public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
        throws Exception {
        // try and read the document - set to ignore character set directive,
        // assuming that the input stream is already set properly (I hope)
        HTMLEditorKit kit = new HTMLEditorKit();
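The hunk above is truncated before the extraction itself. For orientation, this is the usual HTMLEditorKit pattern for pulling plain text out of HTML, shown as a standalone sketch rather than as the exact remainder of the method; the sample markup is an example only.

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import javax.swing.text.Document;
import javax.swing.text.html.HTMLEditorKit;

// Parse HTML into a swing Document, then dump its text content.
public class HtmlTextSketch {
    public static void main(String[] args) throws Exception {
        InputStream source = new ByteArrayInputStream(
            "<html><body><h1>Title</h1><p>Some body text.</p></body></html>"
                .getBytes(StandardCharsets.UTF_8));

        HTMLEditorKit kit = new HTMLEditorKit();
        Document doc = kit.createDefaultDocument();
        doc.putProperty("IgnoreCharsetDirective", Boolean.TRUE);   // mirror the filter's intent
        kit.read(source, doc, 0);

        System.out.println(doc.getText(0, doc.getLength()));
    }
}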
View File
@@ -7,53 +7,46 @@
 */
package org.dspace.app.mediafilter;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.InputStream;
import java.nio.file.Files;

import org.dspace.content.Item;

/**
 * Filter image bitstreams, scaling the image to be within the bounds of
 * thumbnail.maxwidth, thumbnail.maxheight, the size we want our thumbnail to be
 * no bigger than. Creates only JPEGs.
 */
public class ImageMagickImageThumbnailFilter extends ImageMagickThumbnailFilter {
    /**
     * @param currentItem item
     * @param source      source input stream
     * @param verbose     verbose mode
     * @return InputStream the resulting input stream
     * @throws Exception if error
     */
    @Override
    public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
        throws Exception {
        File f = inputStreamToTempFile(source, "imthumb", ".tmp");
        File f2 = null;
        try {
            f2 = getThumbnailFile(f, verbose);
            byte[] bytes = Files.readAllBytes(f2.toPath());
            return new ByteArrayInputStream(bytes);
        } finally {
            //noinspection ResultOfMethodCallIgnored
            f.delete();
            if (f2 != null) {
                //noinspection ResultOfMethodCallIgnored
                f2.delete();
            }
        }
    }
}
View File
@@ -7,43 +7,37 @@
 */
package org.dspace.app.mediafilter;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.InputStream;
import java.nio.file.Files;

import org.dspace.content.Item;

public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter {
    @Override
    public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
        throws Exception {
        File f = inputStreamToTempFile(source, "impdfthumb", ".pdf");
        File f2 = null;
        File f3 = null;
        try {
            f2 = getImageFile(f, 0, verbose);
            f3 = getThumbnailFile(f2, verbose);
            byte[] bytes = Files.readAllBytes(f3.toPath());
            return new ByteArrayInputStream(bytes);
        } finally {
            //noinspection ResultOfMethodCallIgnored
            f.delete();
            if (f2 != null) {
                //noinspection ResultOfMethodCallIgnored
                f2.delete();
            }
            if (f3 != null) {
                //noinspection ResultOfMethodCallIgnored
                f3.delete();
            }
        }
    }
}
View File
@@ -14,191 +14,189 @@ import java.io.InputStream;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException; import java.util.regex.PatternSyntaxException;
import javax.imageio.ImageIO;
import org.dspace.content.Bitstream; import org.dspace.content.Bitstream;
import org.dspace.content.Bundle; import org.dspace.content.Bundle;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.im4java.core.ConvertCmd; import org.im4java.core.ConvertCmd;
import org.im4java.core.Info;
import org.im4java.core.IM4JavaException; import org.im4java.core.IM4JavaException;
import org.im4java.core.IMOperation; import org.im4java.core.IMOperation;
import org.im4java.core.Info;
import org.im4java.process.ProcessStarter; import org.im4java.process.ProcessStarter;
import org.dspace.core.ConfigurationManager;
/** /**
* Filter image bitstreams, scaling the image to be within the bounds of * Filter image bitstreams, scaling the image to be within the bounds of
* thumbnail.maxwidth, thumbnail.maxheight, the size we want our thumbnail to be * thumbnail.maxwidth, thumbnail.maxheight, the size we want our thumbnail to be
* no bigger than. Creates only JPEGs. * no bigger than. Creates only JPEGs.
*/ */
public abstract class ImageMagickThumbnailFilter extends MediaFilter { public abstract class ImageMagickThumbnailFilter extends MediaFilter {
protected static int width = 180; protected static int width = 180;
protected static int height = 120; protected static int height = 120;
private static boolean flatten = true; private static boolean flatten = true;
static String bitstreamDescription = "IM Thumbnail"; static String bitstreamDescription = "IM Thumbnail";
static final String defaultPattern = "Generated Thumbnail"; static final String defaultPattern = "Generated Thumbnail";
static Pattern replaceRegex = Pattern.compile(defaultPattern); static Pattern replaceRegex = Pattern.compile(defaultPattern);
protected final ItemService itemService = ContentServiceFactory.getInstance().getItemService(); protected final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
static String cmyk_profile; static String cmyk_profile;
static String srgb_profile; static String srgb_profile;
static { static {
String pre = ImageMagickThumbnailFilter.class.getName(); String pre = ImageMagickThumbnailFilter.class.getName();
String s = ConfigurationManager.getProperty(pre + ".ProcessStarter"); String s = ConfigurationManager.getProperty(pre + ".ProcessStarter");
ProcessStarter.setGlobalSearchPath(s); ProcessStarter.setGlobalSearchPath(s);
width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width); width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width);
height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height); height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height);
flatten = ConfigurationManager.getBooleanProperty(pre + ".flatten", flatten); flatten = ConfigurationManager.getBooleanProperty(pre + ".flatten", flatten);
String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription"); String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription");
cmyk_profile = ConfigurationManager.getProperty(pre + ".cmyk_profile"); cmyk_profile = ConfigurationManager.getProperty(pre + ".cmyk_profile");
srgb_profile = ConfigurationManager.getProperty(pre + ".srgb_profile"); srgb_profile = ConfigurationManager.getProperty(pre + ".srgb_profile");
if (description != null) {
bitstreamDescription = description;
}
try {
String patt = ConfigurationManager.getProperty(pre + ".replaceRegex");
replaceRegex = Pattern.compile(patt == null ? defaultPattern : patt);
} catch (PatternSyntaxException e) {
System.err.println("Invalid thumbnail replacement pattern: " + e.getMessage());
}
}
public ImageMagickThumbnailFilter() {
}
@Override
public String getFilteredName(String oldFilename) {
return oldFilename + ".jpg";
}
/**
* @return String bundle name
*/
@Override
public String getBundleName() {
return "THUMBNAIL";
}
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString() {
return "JPEG";
}
/**
* @return String bitstreamDescription
*/
@Override
public String getDescription() {
return bitstreamDescription;
}
public File inputStreamToTempFile(InputStream source, String prefix, String suffix) throws IOException {
File f = File.createTempFile(prefix, suffix);
f.deleteOnExit();
FileOutputStream fos = new FileOutputStream(f);
byte[] buffer = new byte[1024];
int len = source.read(buffer);
while (len != -1) {
fos.write(buffer, 0, len);
len = source.read(buffer);
}
fos.close();
return f;
}
public File getThumbnailFile(File f, boolean verbose)
throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
op.autoOrient();
op.addImage(f.getAbsolutePath());
op.thumbnail(width, height);
op.addImage(f2.getAbsolutePath());
if (verbose) {
System.out.println("IM Thumbnail Param: " + op);
}
cmd.run(op);
return f2;
}
public File getImageFile(File f, int page, boolean verbose)
throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
String s = "[" + page + "]";
op.addImage(f.getAbsolutePath() + s);
if (flatten) {
op.flatten();
}
// PDFs using the CMYK color system can be handled specially if
// profiles are defined
if (cmyk_profile != null && srgb_profile != null) {
Info imageInfo = new Info(f.getAbsolutePath() + s, true);
String imageClass = imageInfo.getImageClass();
if (imageClass.contains("CMYK")) {
op.profile(cmyk_profile);
op.profile(srgb_profile);
}
}
op.addImage(f2.getAbsolutePath());
if (verbose) {
System.out.println("IM Image Param: " + op);
}
cmd.run(op);
return f2;
}
@Override
public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose) throws Exception {
String nsrc = source.getName();
for (Bundle b : itemService.getBundles(item, "THUMBNAIL")) {
for (Bitstream bit : b.getBitstreams()) {
String n = bit.getName();
if (n != null) {
if (nsrc != null) {
if (!n.startsWith(nsrc)) {
continue;
}
}
}
String description = bit.getDescription();
// If anything other than a generated thumbnail
// is found, halt processing
if (description != null) { if (description != null) {
bitstreamDescription = description; if (replaceRegex.matcher(description).matches()) {
} if (verbose) {
try { System.out.println(description + " " + nsrc
String patt = ConfigurationManager.getProperty(pre + ".replaceRegex"); + " matches pattern and is replacable.");
replaceRegex = Pattern.compile(patt == null ? defaultPattern : patt);
} catch (PatternSyntaxException e) {
System.err.println("Invalid thumbnail replacement pattern: " + e.getMessage());
}
}
public ImageMagickThumbnailFilter() {
}
@Override
public String getFilteredName(String oldFilename) {
return oldFilename + ".jpg";
}
/**
* @return String bundle name
*
*/
@Override
public String getBundleName() {
return "THUMBNAIL";
}
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString() {
return "JPEG";
}
/**
* @return String bitstreamDescription
*/
@Override
public String getDescription() {
return bitstreamDescription;
}
public File inputStreamToTempFile(InputStream source, String prefix, String suffix) throws IOException {
File f = File.createTempFile(prefix, suffix);
f.deleteOnExit();
FileOutputStream fos = new FileOutputStream(f);
byte[] buffer = new byte[1024];
int len = source.read(buffer);
while (len != -1) {
fos.write(buffer, 0, len);
len = source.read(buffer);
}
fos.close();
return f;
}
public File getThumbnailFile(File f, boolean verbose)
throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
op.addImage(f.getAbsolutePath());
op.thumbnail(width, height);
op.addImage(f2.getAbsolutePath());
if (verbose) {
System.out.println("IM Thumbnail Param: " + op);
}
cmd.run(op);
return f2;
}
public File getImageFile(File f, int page, boolean verbose)
throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
String s = "[" + page + "]";
op.addImage(f.getAbsolutePath() + s);
if (flatten) {
op.flatten();
}
// PDFs using the CMYK color system can be handled specially if
// profiles are defined
if (cmyk_profile != null && srgb_profile != null) {
Info imageInfo = new Info(f.getAbsolutePath(), true);
String imageClass = imageInfo.getImageClass();
if (imageClass.contains("CMYK")) {
op.profile(cmyk_profile);
                op.profile(srgb_profile);
            }
        }
        op.addImage(f2.getAbsolutePath());
        if (verbose) {
            System.out.println("IM Image Param: " + op);
        }
        cmd.run(op);
return f2;
}
@Override
public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose) throws Exception {
String nsrc = source.getName();
for (Bundle b : itemService.getBundles(item, "THUMBNAIL")) {
for (Bitstream bit : b.getBitstreams()) {
String n = bit.getName();
if (n != null) {
if (nsrc != null) {
if (!n.startsWith(nsrc))
continue;
}
}
String description = bit.getDescription();
// If anything other than a generated thumbnail
// is found, halt processing
if (description != null) {
if (replaceRegex.matcher(description).matches()) {
if (verbose) {
System.out.println(description + " " + nsrc
+ " matches pattern and is replacable.");
}
continue;
}
if (description.equals(bitstreamDescription)) {
if (verbose) {
System.out.println(bitstreamDescription + " " + nsrc
+ " is replacable.");
}
continue;
}
}
System.out.println("Custom Thumbnail exists for " + nsrc + " for item "
+ item.getHandle() + ". Thumbnail will not be generated. ");
return false;
            }
        }
        return true; // assume that the thumbnail is a custom one
    }
}
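In both variants of preProcessBitstream() above, the decision hinges on the existing bitstream's description: anything matching the configured replaceRegex, or equal to the configured bitstreamDescription, is treated as an auto-generated thumbnail that may be regenerated, while any other description is assumed to be a custom thumbnail and blocks regeneration. A small illustration of the matching semantics, assuming "Generated Thumbnail" as the pattern purely for illustration (the actual pattern is site configuration and may differ):

    // Illustrative only; the pattern and descriptions are examples.
    Pattern replaceable = Pattern.compile("Generated Thumbnail");
    replaceable.matcher("Generated Thumbnail").matches();   // true  -> safe to regenerate
    replaceable.matcher("Cover scan (uploaded)").matches(); // false -> keep the custom thumbnail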
View File
@@ -7,16 +7,18 @@
 */
package org.dspace.app.mediafilter;

import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.Transparency;
import java.awt.image.BufferedImage;
import java.awt.image.BufferedImageOp;
import java.awt.image.ConvolveOp;
import java.awt.image.Kernel;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;

import javax.imageio.ImageIO;

import org.dspace.content.Item;
@@ -29,21 +31,17 @@ import org.dspace.core.ConfigurationManager;
 *
 * @author Jason Sherman jsherman@usao.edu
 */
public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats {
    @Override
    public String getFilteredName(String oldFilename) {
        return oldFilename + ".jpg";
    }

    /**
     * @return String bundle name
     */
    @Override
    public String getBundleName() {
        return "THUMBNAIL";
    }
@@ -51,8 +49,7 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
     * @return String bitstreamformat
     */
    @Override
    public String getFormatString() {
        return "JPEG";
    }
@@ -60,23 +57,20 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
     * @return String description
     */
    @Override
    public String getDescription() {
        return "Generated Thumbnail";
    }

    /**
     * @param currentItem item
     * @param source      source input stream
     * @param verbose     verbose mode
     * @return InputStream the resulting input stream
     * @throws Exception if error
     */
    @Override
    public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
        throws Exception {
        // read in bitstream's image
        BufferedImage buf = ImageIO.read(source);
@@ -84,45 +78,42 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
    }

    public InputStream getThumb(Item currentItem, BufferedImage buf, boolean verbose)
        throws Exception {
        // get config params
        float xmax = (float) ConfigurationManager
            .getIntProperty("thumbnail.maxwidth");
        float ymax = (float) ConfigurationManager
            .getIntProperty("thumbnail.maxheight");
        boolean blurring = (boolean) ConfigurationManager
            .getBooleanProperty("thumbnail.blurring");
        boolean hqscaling = (boolean) ConfigurationManager
            .getBooleanProperty("thumbnail.hqscaling");

        return getThumbDim(currentItem, buf, verbose, xmax, ymax, blurring, hqscaling, 0, 0, null);
    }

    public InputStream getThumbDim(Item currentItem, BufferedImage buf, boolean verbose, float xmax, float ymax,
                                   boolean blurring, boolean hqscaling, int brandHeight, int brandFontPoint,
                                   String brandFont)
        throws Exception {
        // now get the image dimensions
        float xsize = (float) buf.getWidth(null);
        float ysize = (float) buf.getHeight(null);

        // if verbose flag is set, print out dimensions
        // to STDOUT
        if (verbose) {
            System.out.println("original size: " + xsize + "," + ysize);
        }

        // scale by x first if needed
        if (xsize > xmax) {
            // calculate scaling factor so that xsize * scale = new size (max)
            float scale_factor = xmax / xsize;

            // if verbose flag is set, print out extracted text
            // to STDOUT
            if (verbose) {
                System.out.println("x scale factor: " + scale_factor);
            }
@@ -133,15 +124,13 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
            // if verbose flag is set, print out extracted text
            // to STDOUT
            if (verbose) {
                System.out.println("size after fitting to maximum width: " + xsize + "," + ysize);
            }
        }

        // scale by y if needed
        if (ysize > ymax) {
            float scale_factor = ymax / ysize;

            // now reduce x size
@@ -151,31 +140,28 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
        }

        // if verbose flag is set, print details to STDOUT
        if (verbose) {
            System.out.println("size after fitting to maximum height: " + xsize + ", "
                                   + ysize);
        }

        // create an image buffer for the thumbnail with the new xsize, ysize
        BufferedImage thumbnail = new BufferedImage((int) xsize, (int) ysize,
                                                    BufferedImage.TYPE_INT_RGB);

        // Use blurring if selected in config.
        // a little blur before scaling does wonders for keeping moire in check.
        if (blurring) {
            // send the buffered image off to get blurred.
            buf = getBlurredInstance((BufferedImage) buf);
        }

        // Use high quality scaling method if selected in config.
        // this has a definite performance penalty.
        if (hqscaling) {
            // send the buffered image off to get an HQ downscale.
            buf = getScaledInstance((BufferedImage) buf, (int) xsize, (int) ysize,
                                    (Object) RenderingHints.VALUE_INTERPOLATION_BICUBIC, (boolean) true);
        }

        // now render the image into the thumbnail buffer
@@ -188,7 +174,7 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
                                       ConfigurationManager.getProperty("webui.preview.brand.abbrev"),
                                       currentItem == null ? "" : "hdl:" + currentItem.getHandle());
            g2d.drawImage(brandImage, (int) 0, (int) ysize, (int) xsize, (int) 20, null);
        }

        // now create an input stream for the thumbnail buffer and return it
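To make the scaling arithmetic above concrete (an illustrative example, not a value taken from this changeset): a 3000x2000 source with thumbnail.maxwidth = thumbnail.maxheight = 300 is first scaled by 300 / 3000 = 0.1, giving 300x200; since 200 is already within the height limit, the second branch is skipped and the thumbnail buffer is created at 300x200.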
@@ -204,37 +190,32 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
    @Override
    public String[] getInputMIMETypes() {
        return ImageIO.getReaderMIMETypes();
    }

    @Override
    public String[] getInputDescriptions() {
        return null;
    }

    @Override
    public String[] getInputExtensions() {
        // Temporarily disabled as JDK 1.6 only
        // return ImageIO.getReaderFileSuffixes();
        return null;
    }

    public BufferedImage getNormalizedInstance(BufferedImage buf) {
        int type = (buf.getTransparency() == Transparency.OPAQUE) ?
            BufferedImage.TYPE_INT_RGB : BufferedImage.TYPE_INT_ARGB_PRE;
        int w = buf.getWidth();
        int h = buf.getHeight();
        BufferedImage normal = new BufferedImage(w, h, type);
        Graphics2D g2d = normal.createGraphics();
        g2d.drawImage(buf, 0, 0, w, h, Color.WHITE, null);
        g2d.dispose();
        return normal;
    }

    /**
@@ -244,55 +225,54 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
     * @param buf buffered image
     * @return updated BufferedImage
     */
    public BufferedImage getBlurredInstance(BufferedImage buf) {
        buf = getNormalizedInstance(buf);

        // kernel for blur op
        float[] matrix = {
            0.111f, 0.111f, 0.111f,
            0.111f, 0.111f, 0.111f,
            0.111f, 0.111f, 0.111f,
        };

        // perform the blur and return the blurred version.
        BufferedImageOp blur = new ConvolveOp(new Kernel(3, 3, matrix));
        BufferedImage blurbuf = blur.filter(buf, null);
        return blurbuf;
    }
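The 3x3 kernel above is a plain box blur: its nine taps of 0.111 sum to roughly 1.0 (9 x 0.111 = 0.999), so overall brightness is essentially preserved while the slight smoothing suppresses the moire patterns that sharp detail tends to produce after downscaling.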
    /**
     * Convenience method that returns a scaled instance of the
     * provided {@code BufferedImage}.
     *
     * @param buf           the original image to be scaled
     * @param targetWidth   the desired width of the scaled instance,
     *                      in pixels
     * @param targetHeight  the desired height of the scaled instance,
     *                      in pixels
     * @param hint          one of the rendering hints that corresponds to
     *                      {@code RenderingHints.KEY_INTERPOLATION} (e.g.
     *                      {@code RenderingHints.VALUE_INTERPOLATION_NEAREST_NEIGHBOR},
     *                      {@code RenderingHints.VALUE_INTERPOLATION_BILINEAR},
     *                      {@code RenderingHints.VALUE_INTERPOLATION_BICUBIC})
     * @param higherQuality if true, this method will use a multi-step
     *                      scaling technique that provides higher quality than the usual
     *                      one-step technique (only useful in downscaling cases, where
     *                      {@code targetWidth} or {@code targetHeight} is
     *                      smaller than the original dimensions, and generally only when
     *                      the {@code BILINEAR} hint is specified)
     * @return a scaled version of the original {@code BufferedImage}
     */
    public BufferedImage getScaledInstance(BufferedImage buf,
                                           int targetWidth,
                                           int targetHeight,
                                           Object hint,
                                           boolean higherQuality) {
        int type = (buf.getTransparency() == Transparency.OPAQUE) ?
            BufferedImage.TYPE_INT_RGB : BufferedImage.TYPE_INT_ARGB;
        BufferedImage scalebuf = (BufferedImage) buf;
        int w;
        int h;
        if (higherQuality) {
            // Use multi-step technique: start with original size, then
            // scale down in multiple passes with drawImage()
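A usage sketch for getScaledInstance(); the 300x300 target and the BILINEAR hint are arbitrary illustrative choices, and buf is assumed to be an existing BufferedImage:

    // Hypothetical call: multi-step downscale to fit roughly within 300x300,
    // which is the case the higherQuality path is designed for.
    BufferedImage scaled = getScaledInstance(buf, 300, 300,
                                             RenderingHints.VALUE_INTERPOLATION_BILINEAR, true);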
View File
@@ -18,9 +18,8 @@ import org.dspace.core.Context;
 * by the MediaFilterManager. More complex filters should likely implement the FormatFilter
 * interface directly, so that they can define their own pre/postProcessing methods.
 */
public abstract class MediaFilter implements FormatFilter {
    /**
     * Perform any pre-processing of the source bitstream *before* the actual
     * filtering takes place in MediaFilterManager.processBitstream().
     * <p>
@@ -28,20 +27,17 @@ public abstract class MediaFilter implements FormatFilter
     * is necessary). Return false if bitstream should be skipped
     * for any reason.
     *
     * @param c       context
     * @param item    item containing bitstream to process
     * @param source  source bitstream to be processed
     * @param verbose verbose mode
     * @return true if bitstream processing should continue,
     *         false if this bitstream should be skipped
     * @throws Exception if error
     */
    @Override
    public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose)
        throws Exception {
        return true; //default to no pre-processing
    }
@@ -53,20 +49,15 @@ public abstract class MediaFilter implements FormatFilter
     * is necessary). Return false if bitstream should be skipped
     * for some reason.
     *
     * @param c                  context
     * @param item               item containing bitstream to process
     * @param generatedBitstream the bitstream which was generated by
     *                           this filter.
     * @throws Exception if error
     */
    @Override
    public void postProcessBitstream(Context c, Item item, Bitstream generatedBitstream)
        throws Exception {
        //default to no post-processing necessary
    }
}
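To illustrate the extension points this base class leaves open, a hypothetical subclass might look like the sketch below. The class name and behaviour are invented for illustration, it assumes the same imports as MediaFilter itself plus org.dspace.content.Item and java.io.InputStream, and it assumes Bitstream#getSizeBytes() is available:

// Hypothetical filter built on MediaFilter: it overrides only preProcessBitstream()
// to skip empty bitstreams and otherwise inherits the no-op pre/post hooks above.
public class ExampleTextFilter extends MediaFilter {
    @Override
    public String getFilteredName(String oldFilename) {
        return oldFilename + ".txt";
    }

    @Override
    public String getBundleName() {
        return "TEXT";
    }

    @Override
    public String getFormatString() {
        return "Text";
    }

    @Override
    public String getDescription() {
        return "Example extracted text";
    }

    @Override
    public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
        throws Exception {
        return source; // pass-through, purely for illustration
    }

    @Override
    public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose)
        throws Exception {
        // Skip zero-length bitstreams; everything else proceeds to filtering.
        return source.getSizeBytes() > 0;
    }
}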
Some files were not shown because too many files have changed in this diff